diff --git a/code_samples/tdf/decrypt_options.mdx b/code_samples/tdf/decrypt_options.mdx new file mode 100644 index 00000000..28715a3b --- /dev/null +++ b/code_samples/tdf/decrypt_options.mdx @@ -0,0 +1,358 @@ +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +## Decrypt Options + +The following options can be passed to the decrypt call to control how the TDF is opened and validated. + +--- + +### KAS Allowlist + +Restrict decryption to only contact KAS endpoints on an explicit allowlist. If the TDF references a KAS not on the list, decryption will fail. This is a security control to prevent credential exfiltration to a rogue KAS. + + + + +```go +import ( + "bytes" + + "github.com/opentdf/platform/sdk" +) + +tdfReader, err := client.LoadTDF( + bytes.NewReader(encryptedBytes), + sdk.WithKasAllowlist([]string{ + "https://kas.example.com", + "https://kas-backup.example.com", + }), +) +``` + + + + +```java +import io.opentdf.platform.sdk.Config; +import io.opentdf.platform.sdk.TDF; + +Config.TDFReaderConfig readerConfig = Config.newTDFReaderConfig( + Config.WithKasAllowlist( + "https://kas.example.com", + "https://kas-backup.example.com" + ) +); +TDF.Reader reader = sdk.loadTDF(fileChannel, readerConfig); +``` + + + + +```typescript +const plaintext = await client.read({ + source: { type: 'buffer', location: encryptedBytes }, + allowedKASEndpoints: [ + 'https://kas.example.com', + 'https://kas-backup.example.com', + ], +}); +``` + + + + +--- + +### Ignore KAS Allowlist + +Disable the KAS allowlist check entirely. The SDK will contact any KAS referenced in the TDF manifest without restriction. + +:::warning Security consideration +Only use this in controlled environments (e.g., integration tests, airgapped deployments where all KAS endpoints are trusted). In production, the allowlist is an important defence against credential forwarding attacks. 
+::: + + + + +```go +import ( + "bytes" + + "github.com/opentdf/platform/sdk" +) + +tdfReader, err := client.LoadTDF( + bytes.NewReader(encryptedBytes), + sdk.WithIgnoreAllowlist(true), +) +``` + + + + +```java +import io.opentdf.platform.sdk.Config; +import io.opentdf.platform.sdk.TDF; + +Config.TDFReaderConfig readerConfig = Config.newTDFReaderConfig( + Config.WithIgnoreKasAllowlist(true) +); +TDF.Reader reader = sdk.loadTDF(fileChannel, readerConfig); +``` + + + + +```typescript +const plaintext = await client.read({ + source: { type: 'buffer', location: encryptedBytes }, + ignoreAllowlist: true, +}); +``` + + + + +--- + +### Assertion Verification Keys + +Provide public keys used to verify signed assertions embedded in the TDF. If an assertion was signed during encryption, you must supply the corresponding verification key here or verification will fail. + + + + +```go +import ( + "bytes" + + "github.com/opentdf/platform/sdk" +) + +verificationKeys := sdk.AssertionVerificationKeys{ + Keys: map[string]sdk.AssertionKey{ + "assertion-1": { + Alg: sdk.AssertionKeyAlgRS256, + Key: rsaPublicKey, // *rsa.PublicKey + }, + }, +} + +tdfReader, err := client.LoadTDF( + bytes.NewReader(encryptedBytes), + sdk.WithAssertionVerificationKeys(verificationKeys), +) +``` + + + + +```java +import io.opentdf.platform.sdk.AssertionConfig; +import io.opentdf.platform.sdk.Config; +import io.opentdf.platform.sdk.TDF; +import java.util.Map; + +var assertionKey = new AssertionConfig.AssertionKey( + AssertionConfig.AssertionKeyAlg.RS256, + rsaPublicKey +); + +var verificationKeys = new Config.AssertionVerificationKeys(); +verificationKeys.keys = Map.of("assertion-1", assertionKey); + +Config.TDFReaderConfig readerConfig = Config.newTDFReaderConfig( + Config.withAssertionVerificationKeys(verificationKeys) +); +TDF.Reader reader = sdk.loadTDF(fileChannel, readerConfig); +``` + + + + +```typescript +const plaintext = await client.read({ + source: { type: 'buffer', location: encryptedBytes }, 
+ assertionVerificationKeys: { + keys: { + 'assertion-1': { + alg: 'RS256', + key: rsaPublicKey, // CryptoKey + }, + }, + }, +}); +``` + + + + +--- + +### Disable Assertion Verification + +Skip cryptographic verification of all assertions in the TDF. The assertions will still be read and returned, but their signatures will not be checked. + +:::warning +Disabling assertion verification removes a tamper-detection layer. Only use this when you have explicitly verified the TDF's integrity through another mechanism. +::: + + + + +```go +import ( + "bytes" + + "github.com/opentdf/platform/sdk" +) + +tdfReader, err := client.LoadTDF( + bytes.NewReader(encryptedBytes), + sdk.WithDisableAssertionVerification(true), +) +``` + + + + +```java +import io.opentdf.platform.sdk.Config; +import io.opentdf.platform.sdk.TDF; + +Config.TDFReaderConfig readerConfig = Config.newTDFReaderConfig( + Config.withDisableAssertionVerification(true) +); +TDF.Reader reader = sdk.loadTDF(fileChannel, readerConfig); +``` + + + + +```typescript +const plaintext = await client.read({ + source: { type: 'buffer', location: encryptedBytes }, + noVerify: true, +}); +``` + + + + +--- + +### Session Key Type + +During decryption, the SDK generates a short-lived (ephemeral) asymmetric key pair and sends the public half to the KAS. The KAS uses it to securely return the unwrapped Data Encryption Key back to the SDK. This option controls the algorithm used for that ephemeral key pair. + +The default is `rsa:2048`. Use `ec:secp256r1` (or another EC variant) for smaller messages and faster key exchange. Must match an algorithm supported by the KAS. 
+ + + + +```go +import ( + "bytes" + + "github.com/opentdf/platform/lib/ocrypto" + "github.com/opentdf/platform/sdk" +) + +tdfReader, err := client.LoadTDF( + bytes.NewReader(encryptedBytes), + sdk.WithSessionKeyType(ocrypto.EC256Key), +) +``` + + + + +```java +import io.opentdf.platform.sdk.Config; +import io.opentdf.platform.sdk.TDF; + +Config.TDFReaderConfig readerConfig = Config.newTDFReaderConfig( + Config.WithSessionKeyType(KeyType.EC256Key) +); +TDF.Reader reader = sdk.loadTDF(fileChannel, readerConfig); +``` + + + + +```typescript +const plaintext = await client.read({ + source: { type: 'buffer', location: encryptedBytes }, + wrappingKeyAlgorithm: 'ec:secp256r1', +}); +``` + + + + +--- + +### Fulfillable Obligations + +Declare which obligation FQNs the calling application is prepared to fulfil. The platform may attach obligations to an access decision — if an obligation is not declared as fulfillable, decryption may be denied. + + + + +```go +import ( + "bytes" + + "github.com/opentdf/platform/sdk" +) + +tdfReader, err := client.LoadTDF( + bytes.NewReader(encryptedBytes), + sdk.WithTDFFulfillableObligationFQNs([]string{ + "https://example.com/obl/audit/value/log", + "https://example.com/obl/watermark/value/apply", + }), +) +``` + + + + +Fulfillable obligations are not yet supported in the Java SDK TDF reader config. Configure them at the SDK level via `SDKBuilder` if available, or handle obligation enforcement in your application logic after reading the policy object. + + + + +```typescript +const plaintext = await client.read({ + source: { type: 'buffer', location: encryptedBytes }, + fulfillableObligationFQNs: [ + 'https://example.com/obl/audit/value/log', + 'https://example.com/obl/watermark/value/apply', + ], +}); +``` + + + + +--- + +### Max Manifest Size (Go only) + +Limit the maximum size of the TDF manifest that the SDK will parse. This is a defence against malformed or malicious TDFs with abnormally large manifests. 
+ +```go +import ( + "bytes" + + "github.com/opentdf/platform/sdk" +) + +tdfReader, err := client.LoadTDF( + bytes.NewReader(encryptedBytes), + sdk.WithMaxManifestSize(1 * 1024 * 1024), // 1 MB limit +) +``` + diff --git a/code_samples/tdf/encrypt_options.mdx b/code_samples/tdf/encrypt_options.mdx new file mode 100644 index 00000000..8b941437 --- /dev/null +++ b/code_samples/tdf/encrypt_options.mdx @@ -0,0 +1,623 @@ +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +## Encrypt Options + +The following options can be passed to the encrypt call to control how the TDF is constructed. + +--- + +### Data Attributes + +Attach one or more attribute value FQNs to the TDF policy. Access to the data will be governed by these attributes — only entities that hold matching attribute values will be permitted to decrypt. + + + + +```go +import "github.com/opentdf/platform/sdk" + +manifest, err := client.CreateTDF(buf, str, + sdk.WithKasInformation(sdk.KASInfo{URL: platformEndpoint}), + sdk.WithDataAttributes( + "https://example.com/attr/clearance/value/executive", + "https://example.com/attr/department/value/engineering", + ), +) +``` + +Multiple calls to `WithDataAttributes` are additive. + + + + +```java +import io.opentdf.platform.sdk.Config; + +Config.TDFConfig config = Config.newTDFConfig( + Config.withKasInformation(kasInfo), + Config.withDataAttributes( + "https://example.com/attr/clearance/value/executive", + "https://example.com/attr/department/value/engineering" + ) +); +sdk.createTDF(inputStream, outputStream, config); +``` + +To pass fully resolved `Value` objects instead of FQN strings, use `Config.withDataAttributeValues(Value...)`. 
+ + + + +```typescript +const tdf = await client.createZTDF({ + source: { type: 'buffer', location: data }, + defaultKASEndpoint: 'https://kas.example.com', + attributes: [ + 'https://example.com/attr/clearance/value/executive', + 'https://example.com/attr/department/value/engineering', + ], +}); +``` + + + + +:::tip Validating attributes before encrypt +Use [`validateAttributes`](/sdks/discovery#validate-attributes) to confirm that all FQNs exist on the platform before creating a TDF. This prevents silent policy mismatches. +::: + +--- + +### KAS Configuration + +Specify which [Key Access Server (KAS)](/components/key_access) will hold the key grants for this TDF. KAS is the service that enforces access policy by controlling who can unwrap the data encryption key. The KAS URL must be reachable by anyone who needs to decrypt the file. + + + + +```go +import "github.com/opentdf/platform/sdk" + +manifest, err := client.CreateTDF(buf, str, + sdk.WithKasInformation( + sdk.KASInfo{ + URL: "https://kas.example.com", + Algorithm: "ec:secp256r1", // optional; defaults to rsa:2048 + }, + ), +) +``` + + + + +```java +import io.opentdf.platform.sdk.Config; + +var kasInfo = new Config.KASInfo(); +kasInfo.URL = "https://kas.example.com"; +kasInfo.Algorithm = "ec:secp256r1"; // optional; defaults to rsa:2048 + +Config.TDFConfig config = Config.newTDFConfig( + Config.withKasInformation(kasInfo) +); +sdk.createTDF(inputStream, outputStream, config); +``` + + + + +```typescript +const tdf = await client.createZTDF({ + source: { type: 'buffer', location: data }, + defaultKASEndpoint: 'https://kas.example.com', + attributes: ['https://example.com/attr/clearance/value/executive'], +}); +``` + +The `defaultKASEndpoint` is used when `autoconfigure` is `false` or when no KAS is resolved from the attribute policy. 
+ + + + +--- + +### Autoconfigure + +When enabled, the SDK queries the platform's attribute service to automatically determine which KAS endpoints should hold grants for the given data attributes. This eliminates the need to hardcode KAS URLs per encrypt call. + + + + +```go +import "github.com/opentdf/platform/sdk" + +manifest, err := client.CreateTDF(buf, str, + sdk.WithDataAttributes("https://example.com/attr/clearance/value/executive"), + sdk.WithAutoconfigure(true), +) +``` + + + + +```java +import io.opentdf.platform.sdk.Config; + +Config.TDFConfig config = Config.newTDFConfig( + Config.withDataAttributes("https://example.com/attr/clearance/value/executive"), + Config.withAutoconfigure(true) +); +sdk.createTDF(inputStream, outputStream, config); +``` + + + + +```typescript +const tdf = await client.createZTDF({ + source: { type: 'buffer', location: data }, + attributes: ['https://example.com/attr/clearance/value/executive'], + autoconfigure: true, +}); +``` + + + + +:::note +Autoconfigure requires the SDK to be connected to a platform with the attribute service running. If the platform is unavailable or the attribute has no KAS binding, the encrypt call will fail. +::: + +--- + +### Key Splitting + +Split the data encryption key across multiple KAS instances. The key can only be reconstructed when all participating KAS endpoints respond. This is used for multi-party access control or defence-in-depth key custody. + + + + +Provide multiple `KASInfo` entries with distinct `URL` values. The SDK splits the key across all provided KAS endpoints. + +```go +import "github.com/opentdf/platform/sdk" + +manifest, err := client.CreateTDF(buf, str, + sdk.WithKasInformation( + sdk.KASInfo{URL: "https://kas-a.example.com"}, + sdk.KASInfo{URL: "https://kas-b.example.com"}, + ), +) +``` + +To share a key segment (i.e., not split), provide the same `URL` more than once or use `WithAutoconfigure`. + + + + +Use `Config.withSplitPlan` to declare explicit key-split steps. 
Each step names a KAS URL and a split ID. + +```java +import io.opentdf.platform.sdk.Autoconfigure; +import io.opentdf.platform.sdk.Config; + +Config.TDFConfig config = Config.newTDFConfig( + Config.withKasInformation(kasInfoA, kasInfoB), + Config.withSplitPlan( + new Autoconfigure.KeySplitStep("https://kas-a.example.com", "split-1"), + new Autoconfigure.KeySplitStep("https://kas-b.example.com", "split-2") + ) +); +sdk.createTDF(inputStream, outputStream, config); +``` + + + + +```typescript +const tdf = await client.createZTDF({ + source: { type: 'buffer', location: data }, + splitPlan: [ + { kas: 'https://kas-a.example.com', sid: 'split-1' }, + { kas: 'https://kas-b.example.com', sid: 'split-2' }, + ], +}); +``` + +Steps with the same `sid` share a key segment. Steps with different `sid` values represent independent splits that are all required for decryption. + + + + +--- + +### Metadata + +Attach a plaintext metadata string to the TDF. Metadata is stored unencrypted in the TDF manifest and is readable by anyone with access to the file — do not store sensitive values here. Common uses include audit labels, content identifiers, or application-specific tags. + + + + +```go +import "github.com/opentdf/platform/sdk" + +manifest, err := client.CreateTDF(buf, str, + sdk.WithKasInformation(sdk.KASInfo{URL: platformEndpoint}), + sdk.WithMetaData(`{"owner":"alice","sensitivity":"high"}`), +) +``` + + + + +```java +import io.opentdf.platform.sdk.Config; + +Config.TDFConfig config = Config.newTDFConfig( + Config.withKasInformation(kasInfo), + Config.withMetaData("{\"owner\":\"alice\",\"sensitivity\":\"high\"}") +); +sdk.createTDF(inputStream, outputStream, config); +``` + + + + +```typescript +const tdf = await client.createZTDF({ + source: { type: 'buffer', location: data }, + defaultKASEndpoint: 'https://kas.example.com', + metadata: { owner: 'alice', sensitivity: 'high' }, +}); +``` + +:::warning Deprecated +The `metadata` field is deprecated in the JavaScript SDK. 
Use `assertionConfigs` for bound, verifiable metadata instead. +::: + + + + +--- + +### MIME Type + +Declare the content type of the plaintext payload. This is stored in the TDF manifest and can help receiving applications handle the decrypted content correctly. + + + + +```go +import "github.com/opentdf/platform/sdk" + +manifest, err := client.CreateTDF(buf, str, + sdk.WithKasInformation(sdk.KASInfo{URL: platformEndpoint}), + sdk.WithMimeType("application/pdf"), +) +``` + + + + +```java +import io.opentdf.platform.sdk.Config; + +Config.TDFConfig config = Config.newTDFConfig( + Config.withKasInformation(kasInfo), + Config.withMimeType("application/pdf") +); +sdk.createTDF(inputStream, outputStream, config); +``` + + + + +```typescript +const tdf = await client.createZTDF({ + source: { type: 'buffer', location: data }, + defaultKASEndpoint: 'https://kas.example.com', + mimeType: 'application/pdf', +}); +``` + + + + +--- + +### Segment Size + +TDF payloads are split into encrypted segments. The segment size controls the trade-off between memory use and seek performance during decryption. The default is **2 MB**. Larger segments reduce overhead for streaming workloads; smaller segments allow faster random access. 
+ + + + +```go +import "github.com/opentdf/platform/sdk" + +const oneMB = 1 * 1024 * 1024 + +manifest, err := client.CreateTDF(buf, str, + sdk.WithKasInformation(sdk.KASInfo{URL: platformEndpoint}), + sdk.WithSegmentSize(oneMB), +) +``` + + + + +```java +import io.opentdf.platform.sdk.Config; + +int oneMB = 1 * 1024 * 1024; + +Config.TDFConfig config = Config.newTDFConfig( + Config.withKasInformation(kasInfo), + Config.withSegmentSize(oneMB) +); +sdk.createTDF(inputStream, outputStream, config); +``` + + + + +```typescript +const oneMB = 1 * 1024 * 1024; + +const tdf = await client.createZTDF({ + source: { type: 'buffer', location: data }, + defaultKASEndpoint: 'https://kas.example.com', + windowSize: oneMB, +}); +``` + + + + +--- + +### Assertions + +Assertions are signed statements attached to the TDF that carry structured metadata — audit labels, handling instructions, content identifiers — and can be cryptographically verified on decrypt. + + + + +```go +import "github.com/opentdf/platform/sdk" + +assertionCfg := sdk.AssertionConfig{ + ID: "assertion-1", + Type: sdk.HandlingAssertion, + Scope: sdk.TrustedDataObj, + AppliesToState: sdk.Unencrypted, + Statement: sdk.Statement{ + Format: "application/json", + Value: `{"owner":"alice","sensitivity":"high"}`, + }, +} + +manifest, err := client.CreateTDF(buf, str, + sdk.WithKasInformation(sdk.KASInfo{URL: platformEndpoint}), + sdk.WithAssertions(assertionCfg), +) +``` + +To sign the assertion so it can be verified on decrypt, add a `SigningKey`: + +```go +import ( + "crypto/rsa" + + "github.com/opentdf/platform/sdk" +) + +assertionCfg := sdk.AssertionConfig{ + ID: "assertion-1", + Type: sdk.HandlingAssertion, + Scope: sdk.TrustedDataObj, + AppliesToState: sdk.Unencrypted, + Statement: sdk.Statement{ + Format: "application/json", + Value: `{"owner":"alice","sensitivity":"high"}`, + }, + SigningKey: sdk.AssertionKey{ + Alg: sdk.AssertionKeyAlgRS256, + Key: rsaPrivateKey, // *rsa.PrivateKey + }, +} +``` + + + + +```java 
+import io.opentdf.platform.sdk.AssertionConfig; +import io.opentdf.platform.sdk.Config; + +var statement = new AssertionConfig.Statement(); +statement.format = "application/json"; +statement.value = "{\"owner\":\"alice\",\"sensitivity\":\"high\"}"; + +var assertionCfg = new AssertionConfig(); +assertionCfg.id = "assertion-1"; +assertionCfg.type = AssertionConfig.Type.HandlingAssertion; +assertionCfg.scope = AssertionConfig.Scope.TrustedDataObj; +assertionCfg.appliesToState = AssertionConfig.AppliesToState.Unencrypted; +assertionCfg.statement = statement; + +Config.TDFConfig config = Config.newTDFConfig( + Config.withKasInformation(kasInfo), + Config.withAssertionConfig(assertionCfg) +); +sdk.createTDF(inputStream, outputStream, config); +``` + +To sign the assertion, set `assertionCfg.signingKey`: + +```java +import io.opentdf.platform.sdk.AssertionConfig; + +var signingKey = new AssertionConfig.AssertionKey( + AssertionConfig.AssertionKeyAlg.RS256, + rsaPrivateKey // java.security.PrivateKey +); +assertionCfg.signingKey = signingKey; +``` + + + + +```typescript +const tdf = await client.createZTDF({ + source: { type: 'buffer', location: data }, + defaultKASEndpoint: 'https://kas.example.com', + assertionConfigs: [ + { + id: 'assertion-1', + type: 'HandlingAssertion', + scope: 'TrustedDataObj', + appliesToState: 'Unencrypted', + statement: { + format: 'application/json', + value: '{"owner":"alice","sensitivity":"high"}', + }, + }, + ], +}); +``` + +To sign the assertion, add a `signers` map keyed by assertion ID: + +```typescript +const tdf = await client.createZTDF({ + source: { type: 'buffer', location: data }, + defaultKASEndpoint: 'https://kas.example.com', + assertionConfigs: [ + { + id: 'assertion-1', + type: 'HandlingAssertion', + scope: 'TrustedDataObj', + appliesToState: 'Unencrypted', + statement: { format: 'application/json', value: '{"owner":"alice"}' }, + }, + ], + signers: { + 'assertion-1': rsaPrivateKey, // CryptoKey (RS256) + }, +}); +``` + + + + 
+Signed assertions can be verified on decrypt using [Assertion Verification Keys](#assertion-verification-keys). + +--- + +### Wrapping Key Algorithm + +When a TDF is created, the SDK generates a random symmetric Data Encryption Key (DEK) to encrypt the payload. The DEK is then asymmetrically encrypted ("wrapped") using the KAS's public key, so that only the KAS can unwrap it during decryption. This option controls which asymmetric algorithm is used for that wrapping step. + +The default is `rsa:2048` — RSA with a 2048-bit key. EC (elliptic curve) algorithms such as `ec:secp256r1` offer equivalent security with smaller key material stored in the TDF manifest and faster wrap/unwrap operations. + + + + +```go +import ( + "github.com/opentdf/platform/lib/ocrypto" + "github.com/opentdf/platform/sdk" +) + +manifest, err := client.CreateTDF(buf, str, + sdk.WithKasInformation(sdk.KASInfo{URL: platformEndpoint}), + sdk.WithWrappingKeyAlg(ocrypto.EC256Key), +) +``` + +Valid values: `ocrypto.RSA2048Key`, `ocrypto.EC256Key`, `ocrypto.EC384Key`, `ocrypto.EC521Key`. + + + + +```java +import io.opentdf.platform.sdk.Config; + +Config.TDFConfig config = Config.newTDFConfig( + Config.withKasInformation(kasInfo), + Config.WithWrappingKeyAlg(KeyType.EC256Key) +); +sdk.createTDF(inputStream, outputStream, config); +``` + +Valid values: `KeyType.RSA2048Key`, `KeyType.EC256Key`, `KeyType.EC384Key`, `KeyType.EC521Key`. + + + + +```typescript +const tdf = await client.createZTDF({ + source: { type: 'buffer', location: data }, + defaultKASEndpoint: 'https://kas.example.com', + wrappingKeyAlgorithm: 'ec:secp256r1', +}); +``` + +Valid values: `'rsa:2048'`, `'rsa:4096'`, `'ec:secp256r1'`, `'ec:secp384r1'`, `'ec:secp521r1'`. + + + + +:::note KAS support required +The KAS must support the chosen algorithm. Check your platform's KAS public key endpoint to confirm which algorithms are available. +::: + +--- + +### TDF Spec Version + +Target a specific TDF specification version. 
Use this to maintain compatibility with older decoders or to opt in to newer features. The current default is `4.3.0`. + + + + +```go +import "github.com/opentdf/platform/sdk" + +manifest, err := client.CreateTDF(buf, str, + sdk.WithKasInformation(sdk.KASInfo{URL: platformEndpoint}), + sdk.WithTargetMode("4.3.0"), +) +``` + + + + +```java +import io.opentdf.platform.sdk.Config; + +Config.TDFConfig config = Config.newTDFConfig( + Config.withKasInformation(kasInfo), + Config.withTargetMode("4.3.0") +); +sdk.createTDF(inputStream, outputStream, config); +``` + + + + +```typescript +const tdf = await client.createZTDF({ + source: { type: 'buffer', location: data }, + defaultKASEndpoint: 'https://kas.example.com', + tdfSpecVersion: '4.3.0', +}); +``` + +Valid values: `'4.2.2'`, `'4.3.0'`. + + + diff --git a/code_samples/tdf/encryption_ztdf.mdx b/code_samples/tdf/encryption_ztdf.mdx index 5b42ce8c..bc5a0fb1 100644 --- a/code_samples/tdf/encryption_ztdf.mdx +++ b/code_samples/tdf/encryption_ztdf.mdx @@ -202,7 +202,7 @@ async function main() { const attributes: string[] = []; // Option 2: With attributes (requires proper attribute configuration on platform) - // const attributes = ["http://example.com/attr/classification/value/secret"]; + // const attributes = ["http://example.com/attr/clearance/value/executive"]; // Create temporary files const tempDir = os.tmpdir(); diff --git a/docs/sdks/authorization.mdx b/docs/sdks/authorization.mdx index 5f3bb172..15c0dd3d 100644 --- a/docs/sdks/authorization.mdx +++ b/docs/sdks/authorization.mdx @@ -186,8 +186,8 @@ func getEntitlementsV1(client *sdk.SDK) { // Query with multiple resource attributes to understand scope ResourceAttributes: []*authorization.ResourceAttribute{{ AttributeValueFqns: []string{ - "https://company.com/attr/classification/value/public", - "https://company.com/attr/classification/value/confidential", + "https://company.com/attr/clearance/value/public", + "https://company.com/attr/clearance/value/confidential", 
}, }}, }} @@ -411,7 +411,7 @@ func getDecisionV2(client *sdk.SDK) { Resource: &authorizationv2.Resource_AttributeValues_{ AttributeValues: &authorizationv2.Resource_AttributeValues{ Fqns: []string{ - "https://company.com/attr/classification/value/confidential", + "https://company.com/attr/clearance/value/confidential", "https://company.com/attr/department/value/finance", }, }, @@ -457,7 +457,7 @@ func getDecisionV1(client *sdk.SDK) { }}, ResourceAttributes: []*authorization.ResourceAttribute{{ AttributeValueFqns: []string{ - "https://company.com/attr/classification/value/confidential", + "https://company.com/attr/clearance/value/confidential", "https://company.com/attr/department/value/finance", }, }}, @@ -514,7 +514,7 @@ public void getDecision(SDK sdk) throws ExecutionException, InterruptedException Resource.newBuilder() .setAttributeValues( Resource.AttributeValues.newBuilder() - .addFqns("https://company.com/attr/classification/value/confidential") + .addFqns("https://company.com/attr/clearance/value/confidential") .addFqns("https://company.com/attr/department/value/finance") ) ) @@ -558,7 +558,7 @@ async function getDecision(sdk) { resource: { attributeValues: { fqns: [ - 'https://company.com/attr/classification/value/confidential', + 'https://company.com/attr/clearance/value/confidential', 'https://company.com/attr/department/value/finance' ] } @@ -856,7 +856,7 @@ func getDecisionWithTokenV2(client *sdk.SDK, jwtToken string) { Resource: &authorizationv2.Resource{ Resource: &authorizationv2.Resource_AttributeValues_{ AttributeValues: &authorizationv2.Resource_AttributeValues{ - Fqns: []string{"https://company.com/attr/classification/value/public"}, + Fqns: []string{"https://company.com/attr/clearance/value/public"}, }, }, }, @@ -895,7 +895,7 @@ func getDecisionWithTokenV1(client *sdk.SDK, jwtToken string) { }}, }}, ResourceAttributes: []*authorization.ResourceAttribute{{ - AttributeValueFqns: []string{"https://company.com/attr/classification/value/public"}, + 
AttributeValueFqns: []string{"https://company.com/attr/clearance/value/public"}, }}, }} @@ -942,7 +942,7 @@ public void getDecisionWithToken(SDK sdk, String jwtToken) throws ExecutionExcep Resource.newBuilder() .setAttributeValues( Resource.AttributeValues.newBuilder() - .addFqns("https://company.com/attr/classification/value/public") + .addFqns("https://company.com/attr/clearance/value/public") ) ) .build(); @@ -973,7 +973,7 @@ async function getDecisionWithToken(sdk, jwtToken) { }, resource: { attributeValues: { - fqns: ['https://company.com/attr/classification/value/public'] + fqns: ['https://company.com/attr/clearance/value/public'] } } }; diff --git a/docs/sdks/discovery.mdx b/docs/sdks/discovery.mdx index b4d85cef..2736ad59 100644 --- a/docs/sdks/discovery.mdx +++ b/docs/sdks/discovery.mdx @@ -200,13 +200,13 @@ Checks that a list of attribute value FQNs exist on the platform **before** call ```go fqns := []string{ "https://opentdf.io/attr/department/value/marketing", - "https://opentdf.io/attr/clearance/value/secret", + "https://opentdf.io/attr/clearance/value/executive", } if err := client.ValidateAttributes(ctx, fqns...); err != nil { log.Fatalf("attribute validation failed: %v", err) // err will name the specific FQNs that are missing, e.g.: - // "attribute not found: https://opentdf.io/attr/clearance/value/secret" + // "attribute not found: https://opentdf.io/attr/clearance/value/executive" } // Safe to encrypt — all attributes confirmed present @@ -222,7 +222,7 @@ _, err = client.CreateTDF(encryptedBuffer, dataReader, ```java List fqns = List.of( "https://opentdf.io/attr/department/value/marketing", - "https://opentdf.io/attr/clearance/value/secret" + "https://opentdf.io/attr/clearance/value/executive" ); try { @@ -230,7 +230,7 @@ try { } catch (SDK.AttributeNotFoundException e) { System.err.println("attribute validation failed: " + e.getMessage()); // getMessage() names the specific FQNs that are missing, e.g.: - // "attribute not found: 
https://opentdf.io/attr/clearance/value/secret" + // "attribute not found: https://opentdf.io/attr/clearance/value/executive" } // Safe to encrypt — all attributes confirmed present @@ -249,7 +249,7 @@ import { validateAttributes, AttributeNotFoundError } from '@opentdf/sdk'; const fqns = [ 'https://opentdf.io/attr/department/value/marketing', - 'https://opentdf.io/attr/clearance/value/secret', + 'https://opentdf.io/attr/clearance/value/executive', ]; try { @@ -258,7 +258,7 @@ try { if (e instanceof AttributeNotFoundError) { console.error('attribute validation failed:', e.message); // e.message names the specific FQNs that are missing, e.g.: - // "attribute not found: https://opentdf.io/attr/clearance/value/secret" + // "attribute not found: https://opentdf.io/attr/clearance/value/executive" } } diff --git a/docs/sdks/tdf.mdx b/docs/sdks/tdf.mdx index af082cdf..b2399dd1 100644 --- a/docs/sdks/tdf.mdx +++ b/docs/sdks/tdf.mdx @@ -2,9 +2,1102 @@ sidebar_position: 4 --- -import EncryptionTDF from '../../code_samples/tdf/encryption_ztdf.mdx' +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; +import EncryptOptions from '../../code_samples/tdf/encrypt_options.mdx' +import DecryptOptions from '../../code_samples/tdf/decrypt_options.mdx' +# TDF -# Creating TDFs +A **TDF (Trusted Data Format)** wraps a payload in a cryptographic envelope that binds access control policy directly to the data. The TDF contains an encrypted payload, a manifest describing the policy and key access configuration, and optional assertions. 
- \ No newline at end of file +This page covers the core TDF operations: + +- **[CreateTDF](#createtdf)** — encrypt a payload and write a TDF +- **[LoadTDF](#loadtdf)** — open a TDF and access the plaintext +- **[IsValidTdf](#isvalidtdf)** — check whether a stream is a valid TDF without decrypting +- **[BulkDecrypt](#bulkdecrypt-go)** — decrypt multiple TDFs in one call *(Go only)* +- **[TDF Reader](#tdf-reader)** — methods on the reader object returned by `LoadTDF` +- **[Encrypt Options](#encrypt-options)** — full option reference for `CreateTDF` +- **[Decrypt Options](#decrypt-options)** — full option reference for `LoadTDF` +- **[Type Reference](#type-reference)** — `KASInfo`, `PolicyObject`, `Manifest` + +--- + +## Quick Start + +Initialize a client and run an end-to-end encrypt/decrypt in one block. + + + + +```go +import ( + "bytes" + "fmt" + "log" + "strings" + + "github.com/opentdf/platform/sdk" +) + +client, err := sdk.New( + "https://platform.example.com", + sdk.WithClientCredentials("client-id", "client-secret", nil), +) +if err != nil { + log.Fatal(err) +} +defer client.Close() + +// Encrypt +var buf bytes.Buffer +_, err = client.CreateTDF(&buf, strings.NewReader("hello, world"), + sdk.WithKasInformation(sdk.KASInfo{URL: "https://platform.example.com"}), +) +if err != nil { + log.Fatal(err) +} + +// Decrypt +tdfReader, err := client.LoadTDF(bytes.NewReader(buf.Bytes())) +if err != nil { + log.Fatal(err) +} +var plaintext bytes.Buffer +if _, err = tdfReader.WriteTo(&plaintext); err != nil { + log.Fatal(err) +} +fmt.Println(plaintext.String()) // "hello, world" +``` + + + + +```java +import io.opentdf.platform.sdk.SDK; +import io.opentdf.platform.sdk.SDKBuilder; +import io.opentdf.platform.sdk.Config; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.channels.FileChannel; +import java.nio.file.StandardOpenOption; 
+ +SDK sdk = SDKBuilder.newBuilder() + .platformEndpoint("https://platform.example.com") + .clientSecret("client-id", "client-secret") + .build(); + +// Encrypt +var kasInfo = new Config.KASInfo(); +kasInfo.URL = "https://platform.example.com/kas"; +var config = Config.newTDFConfig(Config.withKasInformation(kasInfo)); + +var input = new ByteArrayInputStream("hello, world".getBytes(StandardCharsets.UTF_8)); +var output = new ByteArrayOutputStream(); +sdk.createTDF(input, output, config); + +// Decrypt +Path tmp = Files.createTempFile("example", ".tdf"); +Files.write(tmp, output.toByteArray()); +try (FileChannel ch = FileChannel.open(tmp, StandardOpenOption.READ)) { + var reader = sdk.loadTDF(ch, Config.newTDFReaderConfig()); + var result = new ByteArrayOutputStream(); + reader.readPayload(result); + System.out.println(result.toString(StandardCharsets.UTF_8)); // "hello, world" +} +``` + + + + +```typescript +import { OpenTDF, AuthProviders } from '@opentdf/sdk'; + +const authProvider = await AuthProviders.clientCredentials({ + clientId: 'client-id', + clientSecret: 'client-secret', + oidcOrigin: 'https://platform.example.com/auth/realms/opentdf', +}); + +const client = new OpenTDF({ authProvider, platformUrl: 'https://platform.example.com' }); + +// Encrypt +const enc = new TextEncoder(); +const tdfStream = await client.createZTDF({ + source: { type: 'buffer', location: enc.encode('hello, world') }, + defaultKASEndpoint: 'https://platform.example.com/kas', +}); + +// Decrypt +const encrypted = new Uint8Array(await new Response(tdfStream).arrayBuffer()); +const stream = await client.read({ + source: { type: 'buffer', location: encrypted }, +}); +console.log(await new Response(stream).text()); // "hello, world" +``` + + + + +--- + +## CreateTDF + +Encrypts a plaintext payload and writes a TDF to the provided output destination. 
+ +**Signature** + + + + +```go +func (client *SDK) CreateTDF(out io.Writer, plaintext io.Reader, opts ...TDFOption) (*Manifest, error) +``` + + + + +```java +void SDK.createTDF(InputStream plaintext, OutputStream out, TDFConfig config) + throws IOException, AutoConfigureException +``` + + + + +```typescript +async createZTDF(options: CreateZTDFOptions): Promise +``` + + + + +**Parameters** + +| Parameter | Required | Description | +|-----------|----------|-------------| +| Output destination | Required | Where the encrypted TDF bytes are written. In Go and Java, pass an explicit writer/stream. In JavaScript, the encrypted stream is returned from the call. | +| Plaintext source | Required | The data to encrypt. In Go: `io.Reader`. In Java: `InputStream`. In JavaScript: `source` in the options object. | +| Configuration | Required* | Encryption options. A KAS endpoint must be specified unless autoconfigure is enabled. See [Encrypt Options](#encrypt-options). | + +**Returns** + + + + +`(*Manifest, error)` — The manifest of the created TDF on success. See [Manifest Object](#manifest-object) for field reference. Returns a non-nil `error` on failure. + + + + +`void` — Encrypted bytes are written to the output stream. Throws on failure. + + + + +`Promise` — A readable stream of encrypted TDF bytes. + + + + +**Errors** + + + + +| Error | Cause | +|-------|-------| +| KAS unreachable | The KAS could not be contacted to wrap the encryption key. | +| No KAS configured | No KAS was provided and autoconfigure is disabled or could not resolve one. | +| Write failure | An I/O error occurred writing to `out`. | + + + + +| Exception | Cause | +|-----------|-------| +| `IOException` | I/O failure reading from the input stream or writing to the output stream. | +| `AutoConfigureException` | The SDK could not resolve a KAS from the attribute service. | + + + + +Rejects with `Error` if the KAS is unreachable, authentication fails, or the source data cannot be read. 
+ + + + +**Example** + + + + +```go +import ( + "bytes" + "log" + "strings" + + "github.com/opentdf/platform/sdk" +) + +var buf bytes.Buffer +manifest, err := client.CreateTDF(&buf, strings.NewReader("Sensitive data"), + sdk.WithKasInformation(sdk.KASInfo{URL: "https://kas.example.com"}), +) +if err != nil { + log.Fatal(err) +} +``` + + + + +```java +import io.opentdf.platform.sdk.Config; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.nio.charset.StandardCharsets; + +var kasInfo = new Config.KASInfo(); +kasInfo.URL = "https://kas.example.com"; + +var config = Config.newTDFConfig(Config.withKasInformation(kasInfo)); + +var in = new ByteArrayInputStream("Sensitive data".getBytes(StandardCharsets.UTF_8)); +var out = new ByteArrayOutputStream(); +sdk.createTDF(in, out, config); +``` + + + + +```typescript +const tdf = await client.createZTDF({ + source: { type: 'buffer', location: new TextEncoder().encode('Sensitive data') }, + defaultKASEndpoint: 'https://kas.example.com', +}); +const encrypted = await new Response(tdf).bytes(); +``` + + + + +See [Encrypt Options](#encrypt-options) for the full list of configuration options. + +--- + +## LoadTDF + +Opens an encrypted TDF and returns a [TDF reader](#tdf-reader) that provides access to the plaintext payload and manifest data. + +**Signature** + + + + +```go +func (client *SDK) LoadTDF(reader io.ReadSeeker, opts ...TDFReaderOption) (*Reader, error) +``` + + + + +```java +TDF.Reader SDK.loadTDF(ReadableByteChannel channel, TDFReaderConfig config) throws IOException +``` + + + + +```typescript +// Returns the plaintext payload directly: +async read(options: ReadOptions): Promise + +// Returns a reader for lazy inspection or decryption: +open(options: ReadOptions): TDFReader +``` + + + + +**Parameters** + +| Parameter | Required | Description | +|-----------|----------|-------------| +| Encrypted TDF source | Required | The TDF to open. In Go: `io.ReadSeeker`. 
In Java: `ReadableByteChannel`. In JavaScript: `source` in the options object (see below). | +| Configuration | Optional | Decryption options. Defaults allow any KAS endpoint. See [Decrypt Options](#decrypt-options). | + +**JavaScript `source` types** + +| `type` | `location` | Description | +|--------|-----------|-------------| +| `'buffer'` | `Uint8Array` | In-memory bytes. Use when you have already loaded the TDF into memory. | +| `'stream'` | `ReadableStream` | A Web Streams API `ReadableStream`. Use for large files or piped data. | + +**Returns** + + + + +`(*Reader, error)` — A TDF reader object. See [TDF Reader](#tdf-reader) for available methods. + + + + +`TDF.Reader` — A reader object. See [TDF Reader](#tdf-reader) for available methods. + + + + +- `read()` → `Promise` — A stream of plaintext bytes with `.metadata` attached. +- `open()` → [`TDFReader`](#tdf-reader) — A lazy reader with `.decrypt()`, `.manifest()`, and `.attributes()` methods. + + + + +**Errors** + + + + +| Error | Cause | +|-------|-------| +| Invalid TDF | The input is not a valid TDF. Use [`IsValidTdf`](#isvalidtdf) to pre-validate. | +| KAS unreachable | The KAS referenced in the manifest cannot be contacted. | +| Access denied | The caller's credentials do not satisfy the TDF policy. | +| KAS not allowlisted | The TDF references a KAS endpoint not on the allowlist. | + + + + +Java throws `IOException` for all failure modes. Inspect `e.getMessage()` to distinguish between access denied, unreachable KAS, and malformed TDF — the message text reflects the underlying cause. + + + + +Rejects with `Error` if the TDF is invalid, the KAS is unreachable, access is denied, or the endpoint is not allowlisted. 
+ + + + +**Example** + + + + +```go +import ( + "bytes" + "fmt" + "log" +) + +tdfReader, err := client.LoadTDF(bytes.NewReader(encryptedBytes)) +if err != nil { + log.Fatal(err) +} + +var plaintext bytes.Buffer +if _, err = tdfReader.WriteTo(&plaintext); err != nil { + log.Fatal(err) +} +fmt.Println(plaintext.String()) +``` + + + + +```java +import io.opentdf.platform.sdk.Config; +import io.opentdf.platform.sdk.TDF; +import java.io.ByteArrayOutputStream; +import java.nio.channels.FileChannel; +import java.nio.charset.StandardCharsets; +import java.nio.file.StandardOpenOption; + +try (var channel = FileChannel.open(tdfPath, StandardOpenOption.READ)) { + TDF.Reader reader = sdk.loadTDF(channel, Config.newTDFReaderConfig()); + var output = new ByteArrayOutputStream(); + reader.readPayload(output); + System.out.println(output.toString(StandardCharsets.UTF_8)); +} +``` + + + + +```typescript +const stream = await client.read({ + source: { type: 'buffer', location: encryptedBytes }, +}); +const text = await new Response(stream).text(); +console.log(text); +``` + + + + +See [Decrypt Options](#decrypt-options) for the full list of configuration options. +See [TDF Reader](#tdf-reader) for all methods on the reader object. +See [PolicyObject](#policyobject) and [Manifest Object](#manifest-object) for the types returned by reader methods. + +--- + +## IsValidTdf + +Checks whether a byte stream contains a valid TDF without decrypting it. The stream position is restored after the check. + +**Signature** + + + + +```go +func IsValidTdf(reader io.ReadSeeker) (bool, error) +``` + +This is a package-level function in the `sdk` package, not a method on the client. + + + + +```java +static boolean SDK.isTDF(ReadableByteChannel channel) throws IOException +``` + + + + +Not available as a standalone function. Pass the source to `client.open()` and catch the error on `.manifest()` or `.decrypt()`. 
+ + + + +**Parameters** + +| Parameter | Required | Description | +|-----------|----------|-------------| +| `reader` / `channel` | Required | The data to inspect. The stream position is reset after the check, so the same reader can be passed directly to `LoadTDF`. | + +**Returns** + + + + +`(bool, error)` — `true` if the data is a valid TDF, `false` if it is not. A non-nil `error` indicates an I/O failure, not an invalid TDF. + + + + +`boolean` — `true` if the channel contains a valid TDF. Throws `IOException` on read failure. + + + + +**Example** + + + + +```go +import ( + "bytes" + "fmt" + "log" + + "github.com/opentdf/platform/sdk" +) + +reader := bytes.NewReader(data) +valid, err := sdk.IsValidTdf(reader) +if err != nil { + log.Fatal(err) +} +if !valid { + fmt.Println("Not a valid TDF") + return +} +// reader position is reset — pass directly to LoadTDF +tdfReader, err := client.LoadTDF(reader) +``` + + + + +```java +import io.opentdf.platform.sdk.Config; +import io.opentdf.platform.sdk.SDK; +import io.opentdf.platform.sdk.TDF; +import java.nio.channels.FileChannel; +import java.nio.file.StandardOpenOption; + +try (var channel = FileChannel.open(tdfPath, StandardOpenOption.READ)) { + if (!SDK.isTDF(channel)) { + System.out.println("Not a valid TDF"); + return; + } +} +// Open a fresh channel for decryption +try (var channel = FileChannel.open(tdfPath, StandardOpenOption.READ)) { + TDF.Reader reader = sdk.loadTDF(channel, Config.newTDFReaderConfig()); + // ... 
+} +``` + + + + +:::note Java: inspect manifest without decrypting +Java provides static methods to read the manifest and policy from a TDF without performing key access or decrypting the payload: + +```java +import io.opentdf.platform.sdk.Manifest; +import io.opentdf.platform.sdk.PolicyObject; +import io.opentdf.platform.sdk.SDK; +import java.nio.channels.FileChannel; +import java.nio.file.StandardOpenOption; + +try (var channel = FileChannel.open(tdfPath, StandardOpenOption.READ)) { + Manifest manifest = SDK.readManifest(channel); + System.out.println("TDF version: " + manifest.tdfVersion); + + PolicyObject policy = SDK.decodePolicyObject(manifest); + System.out.println("Policy UUID: " + policy.uuid); + for (var attr : policy.body.dataAttributes) { + System.out.println("Attribute: " + attr.attribute); + } +} +``` +::: + +--- + +## BulkDecrypt (Go) + +Decrypts multiple TDFs in a single operation, batching KAS key rewrap requests to reduce round-trip overhead. + +**Signature** + +```go +func (client *SDK) BulkDecrypt(ctx context.Context, opts ...BulkDecryptOption) error +func (client *SDK) PrepareBulkDecrypt(ctx context.Context, opts ...BulkDecryptOption) (*PreparedBulkDecrypt, error) +``` + +**Parameters** + +| Parameter | Required | Description | +|-----------|----------|-------------| +| `ctx` | Required | Context for the operation, used for cancellation and deadlines. | +| `opts` | Required | At minimum, `sdk.WithTDFs(tdfs...)` must be provided. | + +`sdk.WithTDFs` accepts one or more `*sdk.BulkTDF` structs: + +| Field | Type | Description | +|-------|------|-------------| +| `Reader` | `io.ReadSeeker` | Source of the encrypted TDF. | +| `Writer` | `io.Writer` | Destination for the decrypted plaintext. | + +Use `sdk.WithBulkKasAllowlist([]string{...})` to restrict which KAS endpoints may be contacted. + +**Returns** + +- `BulkDecrypt` returns `error`. If individual TDFs fail, extract per-TDF errors using `sdk.FromBulkErrors(err)`. +- `PrepareBulkDecrypt` returns `(*PreparedBulkDecrypt, error)`. 
Call `prepared.BulkDecrypt(ctx)` to execute after inspecting the prepared request. + +**Errors** + +Individual TDF failures are collected into a single bulk error. Use `sdk.FromBulkErrors(err)` to extract a `[]error` slice, indexed by TDF position. + +**Example** + +```go +import ( + "bytes" + "context" + "fmt" + + "github.com/opentdf/platform/sdk" +) + +ctx := context.Background() + +tdfs := []*sdk.BulkTDF{ + {Reader: bytes.NewReader(tdf1Bytes), Writer: &buf1}, + {Reader: bytes.NewReader(tdf2Bytes), Writer: &buf2}, +} + +err := client.BulkDecrypt(ctx, + sdk.WithTDFs(tdfs...), + sdk.WithBulkKasAllowlist([]string{"https://kas.example.com"}), +) +if err != nil { + if errs, ok := sdk.FromBulkErrors(err); ok { + for i, e := range errs { + if e != nil { + fmt.Printf("TDF %d failed: %v\n", i, e) + } + } + } +} +``` + +To inspect the prepared request before executing: + +```go +import ( + "context" + "log" + + "github.com/opentdf/platform/sdk" +) + +ctx := context.Background() + +prepared, err := client.PrepareBulkDecrypt(ctx, sdk.WithTDFs(tdfs...)) +if err != nil { + log.Fatal(err) +} +err = prepared.BulkDecrypt(ctx) +``` + +--- + +## TDF Reader + +[`LoadTDF`](#loadtdf) (Go) and [`loadTDF`](#loadtdf) (Java) return a reader object. In JavaScript, the equivalent is the [`TDFReader`](#tdf-reader) returned by `client.open()`. You can call manifest and policy methods on the reader without reading the payload, which is useful for routing, auditing, or display. + +### Payload + +Write the full plaintext to any destination, or read incrementally. 
+ + + + +```go +func (r *Reader) WriteTo(w io.Writer) (int64, error) +func (r *Reader) Read(p []byte) (int, error) // implements io.Reader +``` + +```go +import "bytes" + +var plaintext bytes.Buffer +_, err := tdfReader.WriteTo(&plaintext) +``` + + + + +```java +void reader.readPayload(OutputStream out) throws IOException +``` + +```java +import java.io.ByteArrayOutputStream; + +var output = new ByteArrayOutputStream(); +reader.readPayload(output); +``` + + + + +Use `client.read()` for direct payload access, or `.decrypt()` on a [`TDFReader`](#tdf-reader): + +```typescript +const stream = await reader.decrypt(); +const text = await new Response(stream).text(); +``` + + + + +### Metadata + +Returns the plaintext metadata string attached at encryption time. Returns `nil` / empty string if no metadata was set. + + + + +```go +func (r *Reader) UnencryptedMetadata() ([]byte, error) +``` + +```go +import "fmt" + +meta, err := tdfReader.UnencryptedMetadata() +fmt.Printf("Metadata: %s\n", meta) +``` + + + + +```java +String reader.getMetadata() +``` + +```java +String metadata = reader.getMetadata(); +``` + + + + +`.metadata` on the `DecoratedStream` returned by `client.read()`: + +```typescript +const stream = await client.read({ source: { type: 'buffer', location: encryptedBytes } }); +const metadata = await stream.metadata; +``` + + + + +### Policy + +Returns the [`PolicyObject`](#policyobject) containing the UUID, data attributes, and dissemination list. + + + + +```go +func (r *Reader) Policy() (*policy.PolicyObject, error) +``` + +```go +import "fmt" + +policy, err := tdfReader.Policy() +fmt.Printf("Policy UUID: %s\n", policy.UUID) +``` + + + + +```java +PolicyObject reader.readPolicyObject() throws IOException +``` + +```java +import io.opentdf.platform.sdk.PolicyObject; + +PolicyObject policy = reader.readPolicyObject(); +System.out.println("Policy UUID: " + policy.uuid); +``` + + + + +Policy is embedded in the manifest. 
Use `reader.manifest()` on a [`TDFReader`](#tdf-reader): + +```typescript +const manifest = await reader.manifest(); +// Policy is at manifest.encryptionInformation.policy +``` + + + + +### Data Attributes + +Returns the list of data attribute FQN strings attached to the [TDF policy](#policyobject). + + + + +```go +func (r *Reader) DataAttributes() ([]string, error) +``` + +```go +import "fmt" + +attrs, err := tdfReader.DataAttributes() +for _, a := range attrs { + fmt.Println(a) +} +``` + + + + +Access via `readPolicyObject()`: + +```java +import io.opentdf.platform.sdk.PolicyObject; + +PolicyObject policy = reader.readPolicyObject(); +for (var attr : policy.body.dataAttributes) { + System.out.println(attr.attribute); +} +``` + + + + +```typescript +async attributes(): Promise +``` + +```typescript +const attrs = await reader.attributes(); +``` + + + + +### Manifest + +Returns the raw [TDF manifest](#manifest-object), including encryption information, key access objects, and assertions. + + + + +```go +func (r *Reader) Manifest() *Manifest +``` + +```go +import "fmt" + +manifest := tdfReader.Manifest() +fmt.Printf("TDF version: %s\n", manifest.TDFVersion) +``` + + + + +```java +Manifest reader.getManifest() +``` + +```java +Manifest manifest = reader.getManifest(); +System.out.println("TDF version: " + manifest.tdfVersion); +``` + + + + +```typescript +async manifest(): Promise +``` + +```typescript +const manifest = await reader.manifest(); +``` + + + + +--- + +## Type Reference + +The following types are returned by or passed to the methods above. + +### KASInfo + +`KASInfo` is the input type passed to `WithKasInformation` (Go) or used to build a `Config.KASInfo` (Java). It identifies a KAS endpoint and the key configuration to use when wrapping the data encryption key. + +**Fields** + +| Field | Go | Java | Required | Description | +|-------|-----|------|----------|-------------| +| URL | `URL string` | `String URL` | Required | The KAS endpoint URL. 
| +| Algorithm | `Algorithm string` | `String Algorithm` | Optional | Wrapping key algorithm (e.g. `"ec:secp256r1"`). Defaults to `"rsa:2048"` if empty. | +| KID | `KID string` | `String KID` | Optional | Key identifier on the KAS, used when the KAS hosts multiple keys. | +| PublicKey | `PublicKey string` | `String PublicKey` | Optional | PEM-encoded public key. If empty, the SDK fetches it from the KAS. | +| Default | `Default bool` | — | Optional | Go only. If `true`, this KAS is used as the default for encrypt calls when no KAS is explicitly specified. | + +**Example** + + + + +```go +import "github.com/opentdf/platform/sdk" + +kas := sdk.KASInfo{ + URL: "https://kas.example.com", + Algorithm: "ec:secp256r1", + KID: "my-key-1", +} + +manifest, err := client.CreateTDF(&buf, plaintext, + sdk.WithKasInformation(kas), +) +``` + + + + +```java +import io.opentdf.platform.sdk.Config; + +var kasInfo = new Config.KASInfo(); +kasInfo.URL = "https://kas.example.com"; +kasInfo.Algorithm = "ec:secp256r1"; +kasInfo.KID = "my-key-1"; + +Config.TDFConfig config = Config.newTDFConfig( + Config.withKasInformation(kasInfo) +); +``` + + + + +--- + +### PolicyObject + +`PolicyObject` is returned by [`tdfReader.Policy()`](#policy) (Go) and `reader.readPolicyObject()` (Java). It contains the decoded access control policy embedded in the TDF — including the UUID, the list of data attribute FQNs, and the dissemination list. + +**Fields** + +| Field | Go | Java | Description | +|-------|-----|------|-------------| +| UUID | `UUID string` | `String uuid` | Unique identifier for this policy. | +| Body.DataAttributes | `[]attributeObject` | `List` | Data attribute FQNs governing access. Each entry has an `Attribute` (FQN string) and `KasURL`. | +| Body.Dissem | `[]string` | `List` | Dissemination list — entity identifiers explicitly granted access. 
| +**Example** + + + + +```go +import ( + "fmt" + "log" +) + +policy, err := tdfReader.Policy() +if err != nil { + log.Fatal(err) +} + +fmt.Println("Policy UUID:", policy.UUID) +for _, attr := range policy.Body.DataAttributes { + fmt.Printf(" Attribute: %s (KAS: %s)\n", attr.Attribute, attr.KasURL) +} +for _, entity := range policy.Body.Dissem { + fmt.Println(" Dissem:", entity) +} +``` + + + + +```java +import io.opentdf.platform.sdk.PolicyObject; + +PolicyObject policy = reader.readPolicyObject(); +System.out.println("Policy UUID: " + policy.uuid); + +for (var attr : policy.body.dataAttributes) { + System.out.println(" Attribute: " + attr.attribute + " (KAS: " + attr.kasURL + ")"); +} +for (var entity : policy.body.dissem) { + System.out.println(" Dissem: " + entity); +} +``` + + + + +The policy is embedded in the [manifest](#manifest-object). Use `reader.manifest()` on a [`TDFReader`](#tdf-reader) from `client.open()`: + +```typescript +const manifest = await reader.manifest(); +const policy = JSON.parse(atob(manifest.encryptionInformation.policy)); +console.log('Policy UUID:', policy.uuid); +console.log('Attributes:', policy.body.dataAttributes); +``` + + + + +--- + +### Manifest Object + +The `Manifest` type is returned by [`CreateTDF`](#createtdf) and accessible via [`tdfReader.Manifest()`](#manifest). It describes the full TDF structure — encryption method, key access configuration, payload metadata, and any attached assertions — without requiring decryption. + +**Top-Level Fields** + +| Field | Type | Description | +|-------|------|-------------| +| `TDFVersion` | `string` | TDF spec version (e.g. `"4.3.0"`). Serialized as `schemaVersion` in the manifest JSON. | +| `EncryptionInformation` | `EncryptionInformation` | Encryption method, key access objects, and integrity information. | +| `Payload` | `Payload` | Metadata about the encrypted payload. 
| +| `Assertions` | `[]Assertion` | Cryptographically bound or signed statements attached to the TDF. Empty if none were added at encrypt time. | + +**EncryptionInformation Fields** + +| Field | Type | Description | +|-------|------|-------------| +| `KeyAccessObjs` | `[]KeyAccess` | One entry per KAS holding a key grant. Split TDFs have multiple entries. | +| `Method.Algorithm` | `string` | Encryption algorithm (e.g. `"AES-256-GCM"`). | +| `Policy` | `string` | Base64-encoded policy object. Use [`tdfReader.Policy()`](#policy) to decode. | + +**KeyAccess Fields** + +| Field | Type | Description | +|-------|------|-------------| +| `KasURL` | `string` | URL of the KAS holding the key grant. | +| `KeyType` | `string` | Key access type (`"wrapped"` or `"remote"`). | +| `SplitID` | `string` | Key split identifier. Entries sharing the same ID share a key segment; different IDs represent independent splits. | +| `KID` | `string` | Key identifier on the KAS, if set. | + +**Payload Fields** + +| Field | Type | Description | +|-------|------|-------------| +| `MimeType` | `string` | MIME type of the plaintext payload. Empty string if not set at encrypt time. | +| `IsEncrypted` | `bool` | Always `true` in a valid TDF. | +| `Protocol` | `string` | Protocol identifier (e.g. `"zip"`). | + +**Example** + +```go +import ( + "fmt" + + "github.com/opentdf/platform/sdk" +) + +manifest := tdfReader.Manifest() +fmt.Println("TDF version:", manifest.TDFVersion) +fmt.Println("Algorithm:", manifest.EncryptionInformation.Method.Algorithm) +fmt.Println("MIME type:", manifest.Payload.MimeType) + +for _, ka := range manifest.EncryptionInformation.KeyAccessObjs { + fmt.Printf("KAS: %s (split: %s)\n", ka.KasURL, ka.SplitID) +} + +for _, a := range manifest.Assertions { + fmt.Printf("Assertion %s [%s]: %s\n", a.ID, a.Type, a.Statement.Value) +} +``` + +:::note Cross-language +In Java, `reader.getManifest()` returns a `Manifest` object with equivalent fields. 
In JavaScript, `reader.manifest()` returns the manifest as a parsed JSON object matching the TDF spec's `manifest.json` structure. +::: + +--- + + + +--- + +