From fafb390cb0189c3f6ddbc7c64bb6b2c1742f39b0 Mon Sep 17 00:00:00 2001 From: Saksham Garg Date: Tue, 17 Mar 2026 16:43:45 +0530 Subject: [PATCH 01/10] update cmdlets as per frontend spec changes + test fixes+ help --- src/managedcleanroom/HISTORY.rst | 32 +- .../_frontend_commands.py | 35 +- .../_frontend_custom.py | 178 +- .../azext_managedcleanroom/_help.py | 248 +- .../azext_managedcleanroom/_params.py | 76 +- .../analytics_frontend_api/__init__.py | 3 +- .../analytics_frontend_api/_configuration.py | 4 +- .../analytics_frontend_api/_utils/__init__.py | 1 - .../_utils/serialization.py | 17 +- .../analytics_frontend_api/aio/__init__.py | 4 +- .../aio/_configuration.py | 3 +- .../analytics_frontend_api/aio/_patch.py | 2 - .../aio/operations/__init__.py | 4 +- .../aio/operations/_operations.py | 2009 +++++++------- .../operations/__init__.py | 3 +- .../operations/_operations.py | 2421 ++++++++--------- .../latest/test_frontend_collaboration.py | 214 +- .../tests/latest/test_frontend_misc.py | 113 +- .../tests/latest/test_frontend_query.py | 31 +- src/managedcleanroom/setup.py | 2 +- 20 files changed, 2650 insertions(+), 2750 deletions(-) diff --git a/src/managedcleanroom/HISTORY.rst b/src/managedcleanroom/HISTORY.rst index 6e046ab8d21..32e1436f3a8 100644 --- a/src/managedcleanroom/HISTORY.rst +++ b/src/managedcleanroom/HISTORY.rst @@ -3,11 +3,39 @@ Release History =============== +1.0.0b4 ++++++++ +* BREAKING CHANGE: Removed deprecated commands (APIs no longer supported in SDK): + - `az managedcleanroom frontend workloads list` + - `az managedcleanroom frontend analytics deploymentinfo` + - `az managedcleanroom frontend attestation cgs` + - `az managedcleanroom frontend analytics attestationreport cleanroom` + - `az managedcleanroom frontend analytics query vote accept` + - `az managedcleanroom frontend analytics query vote reject` +* BREAKING CHANGE: Consent action values changed from 'accept/reject' to 'enable/disable' +* BREAKING CHANGE: Vote commands 
consolidated into single unified endpoint +* Added: `az managedcleanroom frontend report` - Comprehensive attestation report (replaces cgs/cleanroom commands) +* Added: `az managedcleanroom frontend oidc set-issuer-url` - Configure OIDC issuer URL +* Added: `az managedcleanroom frontend oidc keys` - Get OIDC signing keys (JWKS) +* Added: `az managedcleanroom frontend analytics dataset queries` - List queries using a specific dataset +* Added: `az managedcleanroom frontend analytics secret set` - Set analytics secrets +* Added: `az managedcleanroom frontend analytics query vote` - Unified vote command with --vote-action parameter +* Updated: Added --active-only filter to collaboration list and show commands +* Updated: Added --pending-only filter to invitation list command +* Updated: Added --scope, --from-seqno, --to-seqno filters to audit event list command +* Updated: Response structures modernized (many list endpoints now return structured objects with value arrays) +* Updated: Regenerated frontend SDK from OpenAPI spec to support new APIs + +1.0.0b3 ++++++++ +* Regenerate analytics_frontend_api SDK from updated frontend.yaml OpenAPI spec +* Remove empty enum value from ApplicationState schema + 1.0.0b1 -++++++ ++++++++ * Initial release. 
1.0.0b2 -++++++ ++++++++ * Add frontend commandlets * Add MSAL device code flow authentication \ No newline at end of file diff --git a/src/managedcleanroom/azext_managedcleanroom/_frontend_commands.py b/src/managedcleanroom/azext_managedcleanroom/_frontend_commands.py index 26ac237ea89..702ba71dc9c 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_frontend_commands.py +++ b/src/managedcleanroom/azext_managedcleanroom/_frontend_commands.py @@ -26,22 +26,21 @@ def load_frontend_command_table(loader, _): # Show command at frontend level (requires --collaboration-id) with loader.command_group('managedcleanroom frontend', custom_command_type=frontend_custom) as g: g.custom_show_command('show', 'frontend_collaboration_show') - - # Workloads commands - with loader.command_group('managedcleanroom frontend workloads', custom_command_type=frontend_custom) as g: - g.custom_command('list', 'frontend_collaboration_workloads_list') + g.custom_show_command('report', 'frontend_collaboration_report_show') # Analytics commands with loader.command_group('managedcleanroom frontend analytics', custom_command_type=frontend_custom) as g: g.custom_show_command('show', 'frontend_collaboration_analytics_show') - g.custom_command( - 'deploymentinfo', - 'frontend_collaboration_analytics_deploymentinfo') g.custom_command( 'cleanroompolicy', 'frontend_collaboration_analytics_cleanroompolicy') # OIDC commands + with loader.command_group('managedcleanroom frontend oidc', custom_command_type=frontend_custom) as g: + g.custom_command('set-issuer-url', 'frontend_collaboration_oidc_set_issuer_url') + g.custom_show_command('keys', 'frontend_collaboration_oidc_keys_show') + + # Keep issuerinfo for backwards compatibility with loader.command_group('managedcleanroom frontend oidc issuerinfo', custom_command_type=frontend_custom) as g: g.custom_show_command( 'show', 'frontend_collaboration_oidc_issuerinfo_show') @@ -57,6 +56,7 @@ def load_frontend_command_table(loader, _): 
g.custom_command('list', 'frontend_collaboration_dataset_list') g.custom_show_command('show', 'frontend_collaboration_dataset_show') g.custom_command('publish', 'frontend_collaboration_dataset_publish') + g.custom_command('queries', 'frontend_collaboration_dataset_queries_list') # Consent commands with loader.command_group('managedcleanroom frontend consent', custom_command_type=frontend_custom) as g: @@ -69,13 +69,7 @@ def load_frontend_command_table(loader, _): g.custom_show_command('show', 'frontend_collaboration_query_show') g.custom_command('publish', 'frontend_collaboration_query_publish') g.custom_command('run', 'frontend_collaboration_query_run') - - # Query vote commands - with loader.command_group( - 'managedcleanroom frontend analytics query vote', - custom_command_type=frontend_custom) as g: - g.custom_command('accept', 'frontend_collaboration_query_vote_accept') - g.custom_command('reject', 'frontend_collaboration_query_vote_reject') + g.custom_command('vote', 'frontend_collaboration_query_vote') # Query run history commands with loader.command_group( @@ -97,16 +91,9 @@ def load_frontend_command_table(loader, _): custom_command_type=frontend_custom) as g: g.custom_command('list', 'frontend_collaboration_audit_list') - # Attestation commands - with loader.command_group('managedcleanroom frontend attestation', custom_command_type=frontend_custom) as g: - g.custom_command('cgs', 'frontend_collaboration_attestation_cgs') - - with loader.command_group( - 'managedcleanroom frontend analytics attestationreport', - custom_command_type=frontend_custom) as g: - g.custom_command( - 'cleanroom', - 'frontend_collaboration_attestation_cleanroom') + # Analytics secrets command + with loader.command_group('managedcleanroom frontend analytics secret', custom_command_type=frontend_custom) as g: + g.custom_command('set', 'frontend_collaboration_analytics_secret_set') # Configuration and authentication commands with loader.command_group('managedcleanroom frontend', 
custom_command_type=frontend_custom) as g: diff --git a/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py b/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py index b359b1af880..b6bde9e5470 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py +++ b/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py @@ -16,40 +16,41 @@ # Base Collaboration Commands # ============================================================================ -def frontend_collaboration_list(cmd): +def frontend_collaboration_list(cmd, active_only=False): """List all collaborations :param cmd: CLI command context - :return: List of collaboration IDs + :param active_only: When true, returns only active collaborations (default: False) + :return: List of collaboration objects with collaborationId, collaborationName, userStatus """ client = get_frontend_client(cmd) - return client.collaboration.list() + return client.collaboration.list(active_only=active_only) -def frontend_collaboration_show(cmd, collaboration_id): +def frontend_collaboration_show(cmd, collaboration_id, active_only=False): """Show collaboration details :param cmd: CLI command context :param collaboration_id: Collaboration identifier + :param active_only: When true, queries only active collaborations (default: False) :return: Collaboration details """ client = get_frontend_client(cmd) - return client.collaboration.id_get(collaboration_id) + return client.collaboration.id_get(collaboration_id, active_only=active_only) -# ============================================================================ -# Workloads Commands -# ============================================================================ +def frontend_collaboration_report_show(cmd, collaboration_id): + """Get collaboration report (comprehensive attestation report) -def frontend_collaboration_workloads_list(cmd, collaboration_id): - """List workloads for a collaboration + Replaces the deprecated attestation cgs and 
cleanroom commands. + Returns attestation reports from CGS and consortium manager. :param cmd: CLI command context :param collaboration_id: Collaboration identifier - :return: List of workloads + :return: Collaboration report with cgs and consortiumManager attestation details """ client = get_frontend_client(cmd) - return client.collaboration.workloads_get(collaboration_id) + return client.collaboration.report_get(collaboration_id) # ============================================================================ @@ -67,18 +68,6 @@ def frontend_collaboration_analytics_show(cmd, collaboration_id): return client.collaboration.analytics_get(collaboration_id) -def frontend_collaboration_analytics_deploymentinfo(cmd, collaboration_id): - """Get deployment info for analytics workload - - :param cmd: CLI command context - :param collaboration_id: Collaboration identifier - :return: Deployment information - """ - client = get_frontend_client(cmd) - return client.collaboration.analytics_deployment_info_get( - collaboration_id) - - def frontend_collaboration_analytics_cleanroompolicy(cmd, collaboration_id): """Get cleanroom policy for analytics workload @@ -106,19 +95,44 @@ def frontend_collaboration_oidc_issuerinfo_show(cmd, collaboration_id): return client.collaboration.oidc_issuer_info_get(collaboration_id) +def frontend_collaboration_oidc_set_issuer_url(cmd, collaboration_id, url): + """Set collaboration OIDC issuer URL + + :param cmd: CLI command context + :param collaboration_id: Collaboration identifier + :param url: OIDC issuer URL + :return: Operation result + """ + body = {"url": url} + client = get_frontend_client(cmd) + return client.collaboration.oidc_set_issuer_url_post(collaboration_id, body=body) + + +def frontend_collaboration_oidc_keys_show(cmd, collaboration_id): + """Get collaboration OIDC signing keys (JWKS format) + + :param cmd: CLI command context + :param collaboration_id: Collaboration identifier + :return: OIDC keys in JWKS format + """ + client = 
get_frontend_client(cmd) + return client.collaboration.oidc_keys_get(collaboration_id) + + # ============================================================================ # Invitation Commands # ============================================================================ -def frontend_collaboration_invitation_list(cmd, collaboration_id): +def frontend_collaboration_invitation_list(cmd, collaboration_id, pending_only=False): """List invitations for a collaboration :param cmd: CLI command context :param collaboration_id: Collaboration identifier - :return: List of invitations + :param pending_only: When true, returns only pending invitations (default: False) + :return: Invitations object with array of invitation details """ client = get_frontend_client(cmd) - return client.collaboration.invitations_get(collaboration_id) + return client.collaboration.invitations_get(collaboration_id, pending_only=pending_only) def frontend_collaboration_invitation_show( @@ -198,6 +212,19 @@ def frontend_collaboration_dataset_publish( collaboration_id, document_id, body) +def frontend_collaboration_dataset_queries_list(cmd, collaboration_id, document_id): + """List queries that use a specific dataset + + :param cmd: CLI command context + :param collaboration_id: Collaboration identifier + :param document_id: Dataset document identifier + :return: List of query IDs using this dataset + """ + client = get_frontend_client(cmd) + return client.collaboration.analytics_datasets_document_id_queries_get( + collaboration_id, document_id) + + # ============================================================================ # Consent Commands # ============================================================================ @@ -219,15 +246,18 @@ def frontend_collaboration_consent_set( cmd, collaboration_id, document_id, consent_action): """Set consent document action + NOTE: API changed - consent action is now 'enable' or 'disable' (not accept/reject) + :param cmd: CLI command context :param 
collaboration_id: Collaboration identifier :param document_id: Consent document identifier - :param consent_action: Consent action (e.g., 'enable', 'disable') + :param consent_action: Consent action ('enable' or 'disable') :return: Action result """ + body = {"consentAction": consent_action} client = get_frontend_client(cmd) - return client.collaboration.set_consent_document_id_consent_action_post( - collaboration_id, document_id, consent_action + return client.collaboration.set_consent_document_id_put( + collaboration_id, document_id, body=body ) @@ -314,52 +344,26 @@ def frontend_collaboration_query_run( collaboration_id, document_id, body=body) -# ============================================================================ -# Query Vote Commands -# ============================================================================ - -def frontend_collaboration_query_vote_accept( - cmd, collaboration_id, document_id, body=None): - """Accept query vote - - :param cmd: CLI command context - :param collaboration_id: Collaboration identifier - :param document_id: Query document identifier - :param body: Optional vote configuration JSON (string, dict, or @file) - :return: Vote result - """ - import json - - # Handle body parameter - convert string to dict if needed - if body and isinstance(body, str): - body = json.loads(body) - - client = get_frontend_client(cmd) - return client.collaboration.analytics_queries_document_id_vote_accept_post( - collaboration_id, document_id, body=body - ) - - -def frontend_collaboration_query_vote_reject( - cmd, collaboration_id, document_id, body=None): - """Reject query vote +def frontend_collaboration_query_vote(cmd, collaboration_id, document_id, vote_action, proposal_id=None): + """Vote on a query (unified accept/reject endpoint) :param cmd: CLI command context :param collaboration_id: Collaboration identifier :param document_id: Query document identifier - :param body: Optional vote configuration JSON (string, dict, or @file) - :return: 
Vote result + :param vote_action: Vote action ('accept' or 'reject') + :param proposal_id: Optional proposal ID + :return: Vote result (None on success - 204 No Content) """ - import json + body = { + "voteAction": vote_action + } - # Handle body parameter - convert string to dict if needed - if body and isinstance(body, str): - body = json.loads(body) + if proposal_id: + body["proposalId"] = proposal_id client = get_frontend_client(cmd) - return client.collaboration.analytics_queries_document_id_vote_reject_post( - collaboration_id, document_id, body=body - ) + return client.collaboration.analytics_queries_document_id_vote_post( + collaboration_id, document_id, body=body) # ============================================================================ @@ -399,42 +403,34 @@ def frontend_collaboration_query_runresult_show( # Audit Commands # ============================================================================ -def frontend_collaboration_audit_list(cmd, collaboration_id): +def frontend_collaboration_audit_list(cmd, collaboration_id, scope=None, from_seqno=None, to_seqno=None): """List audit events for a collaboration :param cmd: CLI command context :param collaboration_id: Collaboration identifier - :return: List of audit events - """ - client = get_frontend_client(cmd) - return client.collaboration.analytics_auditevents_get(collaboration_id) - - -# ============================================================================ -# Attestation Commands -# ============================================================================ - -def frontend_collaboration_attestation_cgs(cmd, collaboration_id): - """Get CGS attestation report - - :param cmd: CLI command context - :param collaboration_id: Collaboration identifier - :return: CGS attestation report + :param scope: Optional scope filter + :param from_seqno: Optional starting sequence number + :param to_seqno: Optional ending sequence number + :return: Paginated audit events with nextLink and value array """ client 
= get_frontend_client(cmd) - return client.collaboration.attestationreport_cgs_get(collaboration_id) + return client.collaboration.analytics_auditevents_get( + collaboration_id, scope=scope, from_seqno=from_seqno, to_seqno=to_seqno) -def frontend_collaboration_attestation_cleanroom(cmd, collaboration_id): - """Get cleanroom attestation report +def frontend_collaboration_analytics_secret_set(cmd, collaboration_id, secret_name, secret_value): + """Set secret for analytics workload :param cmd: CLI command context :param collaboration_id: Collaboration identifier - :return: Cleanroom attestation report + :param secret_name: Secret name + :param secret_value: Secret value + :return: Operation result """ + body = {"secretValue": secret_value} client = get_frontend_client(cmd) - return client.collaboration.attestationreport_cleanroom_get( - collaboration_id) + return client.collaboration.analytics_secrets_secret_name_put( + collaboration_id, secret_name, body=body) # ============================================================================ diff --git a/src/managedcleanroom/azext_managedcleanroom/_help.py b/src/managedcleanroom/azext_managedcleanroom/_help.py index ba30e9980c9..24646974c2d 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_help.py +++ b/src/managedcleanroom/azext_managedcleanroom/_help.py @@ -116,6 +116,8 @@ examples: - name: List all collaborations text: az managedcleanroom frontend collaboration list + - name: List only active collaborations + text: az managedcleanroom frontend collaboration list --active-only """ helps['managedcleanroom frontend show'] = """ @@ -124,24 +126,30 @@ examples: - name: Show collaboration details text: az managedcleanroom frontend show --collaboration-id + - name: Show collaboration details (active collaborations only) + text: az managedcleanroom frontend show --collaboration-id --active-only """ # ============================================================================ -# Workloads Help +# Report Help # 
============================================================================ -helps['managedcleanroom frontend workloads'] = """ - type: group - short-summary: Manage collaboration workloads -""" - -helps['managedcleanroom frontend workloads list'] = """ +helps['managedcleanroom frontend report'] = """ type: command - short-summary: List workloads for a collaboration + short-summary: Get comprehensive attestation report + long-summary: | + Retrieves a comprehensive attestation report for a collaboration, + including CGS attestation and cleanroom attestation information. + + This command replaces the deprecated 'attestation cgs' and + 'attestationreport cleanroom' commands, providing a unified + report endpoint. examples: - - name: List workloads - text: az managedcleanroom frontend workloads list -c + - name: Get attestation report for a collaboration + text: az managedcleanroom frontend report --collaboration-id + - name: Get attestation report (short form) + text: az managedcleanroom frontend report -c """ @@ -162,14 +170,6 @@ text: az managedcleanroom frontend analytics show -c """ -helps['managedcleanroom frontend analytics deploymentinfo'] = """ - type: command - short-summary: Get deployment information - examples: - - name: Get deployment info - text: az managedcleanroom frontend analytics deploymentinfo -c -""" - helps['managedcleanroom frontend analytics cleanroompolicy'] = """ type: command short-summary: Get cleanroom policy @@ -179,6 +179,33 @@ """ +# ============================================================================ +# Analytics Secret Help +# ============================================================================ + +helps['managedcleanroom frontend analytics secret'] = """ + type: group + short-summary: Manage analytics secrets +""" + +helps['managedcleanroom frontend analytics secret set'] = """ + type: command + short-summary: Set an analytics secret + long-summary: | + Sets or updates a secret in the analytics collaboration. 
Secrets are + used for secure configuration values needed by analytics workloads. + examples: + - name: Set an analytics secret + text: | + az managedcleanroom frontend analytics secret set \ + --collaboration-id \ + --secret-name \ + --secret-value + - name: Set secret (short form) + text: az managedcleanroom frontend analytics secret set -c --secret-name mySecret --secret-value myValue +""" + + # ============================================================================ # OIDC Help # ============================================================================ @@ -188,6 +215,36 @@ short-summary: Manage OIDC configuration """ +helps['managedcleanroom frontend oidc set-issuer-url'] = """ + type: command + short-summary: Set OIDC issuer URL for a collaboration + long-summary: | + Configures the OIDC issuer URL for the collaboration. This URL is used + for OpenID Connect authentication and token validation. + examples: + - name: Set OIDC issuer URL + text: | + az managedcleanroom frontend oidc set-issuer-url \ + --collaboration-id \ + --url https://oidc.example.com + - name: Set OIDC issuer URL (short form) + text: az managedcleanroom frontend oidc set-issuer-url -c --url https://oidc.example.com +""" + +helps['managedcleanroom frontend oidc keys'] = """ + type: command + short-summary: Get OIDC signing keys (JWKS) + long-summary: | + Retrieves the JSON Web Key Set (JWKS) containing the public keys used + to verify OIDC tokens issued by the collaboration. This endpoint returns + the keys in standard JWKS format. 
+ examples: + - name: Get OIDC signing keys + text: az managedcleanroom frontend oidc keys --collaboration-id + - name: Get OIDC signing keys (short form) + text: az managedcleanroom frontend oidc keys -c +""" + helps['managedcleanroom frontend oidc issuerinfo'] = """ type: group short-summary: Manage OIDC issuer information @@ -215,8 +272,10 @@ type: command short-summary: List invitations for a collaboration examples: - - name: List invitations + - name: List all invitations text: az managedcleanroom frontend invitation list -c + - name: List only pending invitations + text: az managedcleanroom frontend invitation list -c --pending-only """ helps['managedcleanroom frontend invitation show'] = """ @@ -261,6 +320,23 @@ text: az managedcleanroom frontend analytics dataset show -c -d """ +helps['managedcleanroom frontend analytics dataset queries'] = """ + type: command + short-summary: List queries that use a specific dataset + long-summary: | + Retrieves a list of all queries that reference the specified dataset + as an input. This is useful for understanding dataset dependencies + and impact analysis. + examples: + - name: List queries using a dataset + text: | + az managedcleanroom frontend analytics dataset queries \ + --collaboration-id \ + --document-id + - name: List queries using a dataset (short form) + text: az managedcleanroom frontend analytics dataset queries -c -d +""" + helps['managedcleanroom frontend analytics dataset publish'] = """ type: command short-summary: Publish a dataset @@ -309,6 +385,11 @@ helps['managedcleanroom frontend consent set'] = """ type: command short-summary: Set consent document action + long-summary: | + Enables or disables consent for a specific document in the collaboration. + + Note: This command was updated in version 1.0.0b4. The consent actions + changed from 'accept/reject' to 'enable/disable'. 
examples: - name: Enable consent text: az managedcleanroom frontend consent set -c --document-id --consent-action enable @@ -415,70 +496,37 @@ # ============================================================================ helps['managedcleanroom frontend analytics query vote'] = """ - type: group - short-summary: Manage query voting -""" - -helps['managedcleanroom frontend analytics query vote accept'] = """ type: command - short-summary: Accept a query vote + short-summary: Vote on a query (accept or reject) long-summary: | - Accepts a query vote for the specified collaboration and query document. - Optionally accepts a --body parameter for additional vote configuration. - parameters: - - name: --body - type: string - short-summary: Optional vote configuration (JSON string or @file path) - long-summary: | - Optional JSON configuration containing: - - reason: Text explanation for accepting the vote - - metadata: Additional metadata for the vote - examples: - - name: Accept query vote - text: az managedcleanroom frontend analytics query vote accept -c --document-id - - name: Accept query vote with reason - text: | - az managedcleanroom frontend analytics query vote accept \ - -c \ - --document-id \ - --body '{"reason": "Query meets all compliance requirements"}' - - name: Accept query vote with configuration from file - text: | - az managedcleanroom frontend analytics query vote accept \ - -c \ - --document-id \ - --body @vote-config.json -""" + Submits a vote for a query in the collaboration. This unified endpoint + allows you to accept or reject a query with a single command. -helps['managedcleanroom frontend analytics query vote reject'] = """ - type: command - short-summary: Reject a query vote - long-summary: | - Rejects a query vote for the specified collaboration and query document. - Optionally accepts a --body parameter for additional vote configuration. + This command replaces the deprecated 'vote accept' and 'vote reject' + commands in version 1.0.0b4. 
parameters: - - name: --body + - name: --vote-action type: string - short-summary: Optional vote configuration (JSON string or @file path) + short-summary: Vote action (accept or reject) long-summary: | - Optional JSON configuration containing: - - reason: Text explanation for rejecting the vote - - metadata: Additional metadata for the vote + The vote action to perform: + - accept: Approve the query + - reject: Reject the query examples: - - name: Reject query vote - text: az managedcleanroom frontend analytics query vote reject -c --document-id - - name: Reject query vote with reason + - name: Accept a query vote text: | - az managedcleanroom frontend analytics query vote reject \ - -c \ + az managedcleanroom frontend analytics query vote \ + --collaboration-id \ --document-id \ - --body '{"reason": "Query violates data access policy"}' - - name: Reject query vote with configuration from file + --vote-action accept + - name: Reject a query vote text: | - az managedcleanroom frontend analytics query vote reject \ - -c \ + az managedcleanroom frontend analytics query vote \ + --collaboration-id \ --document-id \ - --body @vote-config.json + --vote-action reject + - name: Accept query vote (short form) + text: az managedcleanroom frontend analytics query vote -c -d --vote-action accept """ @@ -528,38 +576,36 @@ helps['managedcleanroom frontend analytics auditevent list'] = """ type: command short-summary: List audit events for a collaboration + long-summary: | + Retrieves audit events for a collaboration with optional filtering. 
+ + Filter options: + - scope: Filter by audit event scope (e.g., 'dataset', 'query', 'collaboration') + - from_seqno: Starting sequence number for event range + - to_seqno: Ending sequence number for event range examples: - - name: List audit events + - name: List all audit events text: az managedcleanroom frontend analytics auditevent list -c + - name: List audit events for dataset scope + text: | + az managedcleanroom frontend analytics auditevent list \ + --collaboration-id \ + --scope dataset + - name: List audit events in sequence number range + text: | + az managedcleanroom frontend analytics auditevent list \ + --collaboration-id \ + --from-seqno 100 \ + --to-seqno 200 + - name: List dataset audit events in specific range + text: | + az managedcleanroom frontend analytics auditevent list \ + -c \ + --scope dataset \ + --from-seqno 50 \ + --to-seqno 150 """ - # ============================================================================ -# Attestation Help +# Attestation Help (Deprecated - Use 'report' command instead) # ============================================================================ - -helps['managedcleanroom frontend attestation'] = """ - type: group - short-summary: View attestation reports -""" - -helps['managedcleanroom frontend attestation cgs'] = """ - type: command - short-summary: Get CGS attestation report - examples: - - name: Get CGS attestation report - text: az managedcleanroom frontend attestation cgs -c -""" - -helps['managedcleanroom frontend analytics attestationreport'] = """ - type: group - short-summary: View attestation reports -""" - -helps['managedcleanroom frontend analytics attestationreport cleanroom'] = """ - type: command - short-summary: Get cleanroom attestation report - examples: - - name: Get cleanroom attestation report - text: az managedcleanroom frontend analytics attestationreport cleanroom -c -""" diff --git a/src/managedcleanroom/azext_managedcleanroom/_params.py 
b/src/managedcleanroom/azext_managedcleanroom/_params.py index 7b191452a54..ba911114549 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_params.py +++ b/src/managedcleanroom/azext_managedcleanroom/_params.py @@ -35,15 +35,29 @@ def load_arguments(self, _): # pylint: disable=unused-argument consent_action_type = CLIArgumentType( options_list=['--consent-action', '-a'], - help='Consent action (enable/disable)' + help="Consent action: 'enable' or 'disable'" + ) + + vote_action_type = CLIArgumentType( + options_list=['--vote-action'], + help="Vote action: 'accept' or 'reject'" ) # Show command context with self.argument_context('managedcleanroom frontend show') as c: c.argument('collaboration_id', collaboration_id_type) - - # Workloads context - with self.argument_context('managedcleanroom frontend workloads') as c: + c.argument('active_only', options_list=['--active-only'], + action='store_true', + help='Query only active collaborations') + + # Collaboration list context + with self.argument_context('managedcleanroom frontend collaboration list') as c: + c.argument('active_only', options_list=['--active-only'], + action='store_true', + help='Filter to active collaborations only') + + # Report context + with self.argument_context('managedcleanroom frontend report') as c: c.argument('collaboration_id', collaboration_id_type) # Analytics context @@ -54,10 +68,24 @@ def load_arguments(self, _): # pylint: disable=unused-argument with self.argument_context('managedcleanroom frontend oidc issuerinfo') as c: c.argument('collaboration_id', collaboration_id_type) + # OIDC set-issuer-url context + with self.argument_context('managedcleanroom frontend oidc set-issuer-url') as c: + c.argument('collaboration_id', collaboration_id_type) + c.argument('url', options_list=['--url'], help='OIDC issuer URL') + + # OIDC keys context + with self.argument_context('managedcleanroom frontend oidc keys') as c: + c.argument('collaboration_id', collaboration_id_type) + # Invitation 
context with self.argument_context('managedcleanroom frontend invitation') as c: c.argument('collaboration_id', collaboration_id_type) + with self.argument_context('managedcleanroom frontend invitation list') as c: + c.argument('pending_only', options_list=['--pending-only'], + action='store_true', + help='Filter to pending invitations only') + with self.argument_context('managedcleanroom frontend invitation show') as c: c.argument('invitation_id', invitation_id_type) @@ -79,6 +107,10 @@ def load_arguments(self, _): # pylint: disable=unused-argument help='JSON string or @file path containing publish configuration. ' 'Must include datasetAccessPoint with name, path, and protection details.') + # Dataset queries context + with self.argument_context('managedcleanroom frontend analytics dataset queries') as c: + c.argument('document_id', document_id_type) + # Consent context with self.argument_context('managedcleanroom frontend consent') as c: c.argument('collaboration_id', collaboration_id_type) @@ -108,24 +140,13 @@ def load_arguments(self, _): # pylint: disable=unused-argument 'body', type=str, help='JSON string or @file path containing run configuration. 
' 'Optional fields: runId (auto-generated if not provided), dryRun, startDate, endDate, useOptimizer.') - # Query vote context + # Query vote context (unified) with self.argument_context('managedcleanroom frontend analytics query vote') as c: c.argument('collaboration_id', collaboration_id_type) c.argument('document_id', document_id_type) - - # Add body parameter for vote accept - with self.argument_context('managedcleanroom frontend analytics query vote accept') as c: - c.argument( - 'body', - type=str, - help='Optional JSON string or @file path containing vote accept configuration.') - - # Add body parameter for vote reject - with self.argument_context('managedcleanroom frontend analytics query vote reject') as c: - c.argument( - 'body', - type=str, - help='Optional JSON string or @file path containing vote reject configuration.') + c.argument('vote_action', vote_action_type) + c.argument('proposal_id', options_list=['--proposal-id'], + help='Optional proposal ID') # Query runhistory context with self.argument_context('managedcleanroom frontend analytics query runhistory') as c: @@ -145,6 +166,23 @@ def load_arguments(self, _): # pylint: disable=unused-argument with self.argument_context('managedcleanroom frontend analytics auditevent') as c: c.argument('collaboration_id', collaboration_id_type) + # Audit event list context + with self.argument_context('managedcleanroom frontend analytics auditevent list') as c: + c.argument('scope', options_list=['--scope'], + help='Optional scope filter') + c.argument('from_seqno', options_list=['--from-seqno'], + help='Optional starting sequence number') + c.argument('to_seqno', options_list=['--to-seqno'], + help='Optional ending sequence number') + + # Analytics secrets context + with self.argument_context('managedcleanroom frontend analytics secret set') as c: + c.argument('collaboration_id', collaboration_id_type) + c.argument('secret_name', options_list=['--secret-name', '-n'], + help='Secret name') + 
c.argument('secret_value', options_list=['--secret-value', '-v'], + help='Secret value') + # Attestation context with self.argument_context('managedcleanroom frontend attestation') as c: c.argument('collaboration_id', collaboration_id_type) diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/__init__.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/__init__.py index 2cf24dc7f73..e3229dff489 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/__init__.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/__init__.py @@ -4,12 +4,11 @@ # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------- # pylint: disable=wrong-import-position -# flake8: noqa from typing import TYPE_CHECKING if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import + from ._patch import * # pylint: disable=unused-wildcard-import # noqa: F403 from ._client import AnalyticsFrontendAPI # type: ignore diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_configuration.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_configuration.py index eeaf278d296..71a9173c647 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_configuration.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_configuration.py @@ -3,8 +3,6 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. 
# -------------------------------------------------------------------------- -# flake8: noqa -# pylint: disable=too-few-public-methods from typing import Any @@ -13,7 +11,7 @@ VERSION = "unknown" -class AnalyticsFrontendAPIConfiguration: # pylint: disable=too-many-instance-attributes +class AnalyticsFrontendAPIConfiguration: # pylint: disable=too-many-instance-attributes,too-few-public-methods """Configuration for AnalyticsFrontendAPI. Note that all parameters used to create this instance are saved as instance diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/__init__.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/__init__.py index b900d16f984..006671542af 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/__init__.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/__init__.py @@ -3,4 +3,3 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------- -# flake8: noqa diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/serialization.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/serialization.py index 2ce3b56ff79..aae648be171 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/serialization.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/serialization.py @@ -1,10 +1,10 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines,protected-access,broad-exception-caught +# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. 
# -------------------------------------------------------------------------- -# pylint: disable=line-too-long,useless-suppression,too-many-lines,protected-access,broad-exception-caught,multiple-statements -# flake8: noqa -# coding=utf-8 + # pyright: reportUnnecessaryTypeIgnoreComment=false from base64 import b64decode, b64encode @@ -1786,11 +1786,12 @@ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return- tuple())): return data - def is_a_text_parsing_type(x): return x not in [ # pylint: disable=unnecessary-lambda-assignment - "object", - "[]", - r"{}", - ] + def is_a_text_parsing_type(x): # pylint: disable=unnecessary-lambda-assignment + return x not in [ + "object", + "[]", + r"{}", + ] if isinstance(data, ET.Element) and is_a_text_parsing_type( data_type) and not data.text: return None diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/__init__.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/__init__.py index 3578f877ac3..e3229dff489 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/__init__.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/__init__.py @@ -4,13 +4,11 @@ # Licensed under the MIT License. See License.txt in the project root for license information. 
# -------------------------------------------------------------------------- # pylint: disable=wrong-import-position -# flake8: noqa - from typing import TYPE_CHECKING if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import + from ._patch import * # pylint: disable=unused-wildcard-import # noqa: F403 from ._client import AnalyticsFrontendAPI # type: ignore diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_configuration.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_configuration.py index 10313c63deb..f5705385f6c 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_configuration.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_configuration.py @@ -3,7 +3,6 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------- -# pylint :disable=too-few-public-methods from typing import Any @@ -12,7 +11,7 @@ VERSION = "unknown" -class AnalyticsFrontendAPIConfiguration: # pylint: disable=too-many-instance-attributes, too-few-public-methods +class AnalyticsFrontendAPIConfiguration: # pylint: disable=too-many-instance-attributes,too-few-public-methods """Configuration for AnalyticsFrontendAPI. Note that all parameters used to create this instance are saved as instance diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_patch.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_patch.py index e51d96769fd..2bd950a309c 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_patch.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_patch.py @@ -3,8 +3,6 @@ # Copyright (c) Microsoft Corporation. All rights reserved. 
# Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------- -# flake8: noqa - """Customize generated code here. Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/__init__.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/__init__.py index 5edce0373ad..91ecd54c4e2 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/__init__.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/__init__.py @@ -4,13 +4,11 @@ # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------- # pylint: disable=wrong-import-position -# flake8: noqa - from typing import TYPE_CHECKING if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import + from ._patch import * # pylint: disable=unused-wildcard-import # noqa: F403 from ._operations import CollaborationOperations # type: ignore diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_operations.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_operations.py index fd87da6a1d0..b61bbc3e5d9 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_operations.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_operations.py @@ -1,10 +1,9 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for license information. # -------------------------------------------------------------------------- -# pylint: disable=too-many-lines - from collections.abc import MutableMapping from io import IOBase from typing import Any, Callable, IO, Optional, TypeVar, Union, cast, overload @@ -29,19 +28,17 @@ build_collaboration_analytics_cleanroompolicy_get_request, build_collaboration_analytics_dataset_document_id_get_request, build_collaboration_analytics_dataset_document_id_publish_post_request, + build_collaboration_analytics_datasets_document_id_queries_get_request, build_collaboration_analytics_datasets_list_get_request, - build_collaboration_analytics_deployment_info_get_request, build_collaboration_analytics_get_request, build_collaboration_analytics_queries_document_id_get_request, build_collaboration_analytics_queries_document_id_publish_post_request, build_collaboration_analytics_queries_document_id_run_post_request, build_collaboration_analytics_queries_document_id_runhistory_get_request, - build_collaboration_analytics_queries_document_id_vote_accept_post_request, - build_collaboration_analytics_queries_document_id_vote_reject_post_request, + build_collaboration_analytics_queries_document_id_vote_post_request, build_collaboration_analytics_queries_jobid_get_request, build_collaboration_analytics_queries_list_get_request, - build_collaboration_attestationreport_cgs_get_request, - build_collaboration_attestationreport_cleanroom_get_request, + build_collaboration_analytics_secrets_secret_name_put_request, build_collaboration_check_consent_document_id_get_request, build_collaboration_id_get_request, build_collaboration_invitation_id_accept_post_request, @@ -49,8 +46,10 @@ build_collaboration_invitations_get_request, build_collaboration_list_request, build_collaboration_oidc_issuer_info_get_request, - build_collaboration_set_consent_document_id_consent_action_post_request, - build_collaboration_workloads_get_request, + 
build_collaboration_oidc_keys_get_request, + build_collaboration_oidc_set_issuer_url_post_request, + build_collaboration_report_get_request, + build_collaboration_set_consent_document_id_put_request, ) from .._configuration import AnalyticsFrontendAPIConfiguration @@ -85,16 +84,18 @@ def __init__(self, *args, **kwargs) -> None: @distributed_trace_async async def list( self, - body: Optional[Any] = None, - **kwargs: Any) -> List[str]: + *, + active_only: bool = False, + **kwargs: Any) -> List[JSON]: """List all collaborations. List all collaborations. - :param body: Default value is None. - :type body: any - :return: list of str - :rtype: list[str] + :keyword active_only: When true, returns only active collaborations (email-only lookup). When + false or omitted, returns all collaborations. Default value is False. + :paramtype active_only: bool + :return: list of JSON object + :rtype: list[JSON] :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -102,7 +103,11 @@ async def list( # response body for status code(s): 200 response == [ - "str" + { + "collaborationId": "str", + "collaborationName": "str", + "userStatus": "str" + } ] """ error_map: MutableMapping = { @@ -113,23 +118,13 @@ async def list( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None - cls: ClsType[List[str]] = kwargs.pop("cls", None) - - if body is not None: - _json = body - else: - _json = None + cls: ClsType[List[JSON]] = kwargs.pop("cls", None) _request = build_collaboration_list_request( - content_type=content_type, - json=_json, + active_only=active_only, headers=_headers, params=_params, ) @@ -158,17 +153,18 @@ async def list( return cls( pipeline_response, cast( - 
List[str], + List[JSON], deserialized), {}) # type: ignore - return cast(List[str], deserialized) # type: ignore + return cast(List[JSON], deserialized) # type: ignore @distributed_trace_async async def id_get( self, collaboration_id: str, - body: Optional[Any] = None, + *, + active_only: bool = False, **kwargs: Any) -> JSON: """Get collaboration by id. @@ -176,8 +172,9 @@ async def id_get( :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. - :type body: any + :keyword active_only: When true, queries only the email-based table (active collaborations). + When false or omitted, queries all tables. Default value is False. + :paramtype active_only: bool :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -188,9 +185,7 @@ async def id_get( # response body for status code(s): 200 response == { "collaborationId": "str", - "consortiumEndpoint": "str", - "consortiumServiceCertificatePem": "str", - "userEmail": "str", + "collaborationName": "str", "userStatus": "str" } # response body for status code(s): 422 @@ -210,24 +205,14 @@ async def id_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - _request = build_collaboration_id_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, + active_only=active_only, headers=_headers, params=_params, ) @@ -263,28 +248,56 @@ async def id_get( return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def workloads_get( - self, collaboration_id: str, body: Optional[Any] = 
None, **kwargs: Any - ) -> Union[List[str], JSON]: - """List all collaboration workloads. + async def report_get(self, collaboration_id: str, **kwargs: Any) -> JSON: + """Get collaboration report. - List all collaboration workloads. + Get collaboration report. :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. - :type body: any - :return: list of str or JSON object - :rtype: list[str] or JSON + :return: JSON object + :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python # response body for status code(s): 200 - response == [ - "str" - ] + response == { + "cgs": { + "cgsEndpoint": "str", + "recoveryAgentEndpoint": "str", + "report": { + "platform": "str", + "reportDataPayload": "str", + "report": { + "attestation": "str", + "platformCertificates": "str", + "serviceCert": "str", + "uvmEndorsements": "str" + } + } + }, + "consortiumManager": { + "endpoint": "str", + "report": { + "platform": "str", + "serviceCert": "str", + "hostData": "str", + "report": { + "attestation": "str", + "platformCertificates": "str", + "serviceCert": "str", + "uvmEndorsements": "str" + } + } + } + } + # response body for status code(s): 400 + response == { + "error": "str", + "message": "str" + } # response body for status code(s): 422 response == { "loc": [ @@ -302,24 +315,13 @@ async def workloads_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None - cls: ClsType[Union[List[str], JSON]] = kwargs.pop("cls", None) - - if body is not None: - _json = body - else: - _json = None + cls: ClsType[JSON] = kwargs.pop("cls", None) - _request = build_collaboration_workloads_get_request( + 
_request = build_collaboration_report_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -332,7 +334,7 @@ async def workloads_get( response = pipeline_response.http_response - if response.status_code not in [200, 422]: + if response.status_code not in [200, 400, 422]: map_error( status_code=response.status_code, response=response, @@ -345,16 +347,19 @@ async def workloads_get( deserialized = None if cls: - return cls(pipeline_response, cast( - Union[List[str], JSON], deserialized), {}) # type: ignore + return cls( + pipeline_response, + cast( + JSON, + deserialized), + {}) # type: ignore - return cast(Union[List[str], JSON], deserialized) # type: ignore + return cast(JSON, deserialized) # type: ignore @distributed_trace_async async def analytics_get( self, collaboration_id: str, - body: Optional[Any] = None, **kwargs: Any) -> JSON: """Get collaboration analytics workload. @@ -362,8 +367,6 @@ async def analytics_get( :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. 
- :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -373,7 +376,7 @@ async def analytics_get( # response body for status code(s): 200 response == { - "data": {}, + "data": "str", "id": "str", "state": "str", "version": "str" @@ -395,24 +398,13 @@ async def analytics_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - _request = build_collaboration_analytics_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -448,17 +440,14 @@ async def analytics_get( return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def analytics_deployment_info_get( - self, collaboration_id: str, body: Optional[Any] = None, **kwargs: Any - ) -> JSON: - """Get collaboration analytics deploymentInfo. + async def analytics_cleanroompolicy_get( + self, collaboration_id: str, **kwargs: Any) -> JSON: + """Get collaboration analytics cleanroompolicy. - Get collaboration analytics deploymentInfo. + Get collaboration analytics cleanroompolicy. :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. 
- :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -468,7 +457,14 @@ async def analytics_deployment_info_get( # response body for status code(s): 200 response == { - "data": {} + "claims": { + "claims": { + "str": {} + } + }, + "proposalIds": [ + "str" + ] } # response body for status code(s): 422 response == { @@ -487,24 +483,13 @@ async def analytics_deployment_info_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - - _request = build_collaboration_analytics_deployment_info_get_request( + _request = build_collaboration_analytics_cleanroompolicy_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -540,17 +525,16 @@ async def analytics_deployment_info_get( return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def analytics_cleanroompolicy_get( - self, collaboration_id: str, body: Optional[Any] = None, **kwargs: Any - ) -> JSON: - """Get collaboration analytics cleanroompolicy. + async def oidc_issuer_info_get( + self, + collaboration_id: str, + **kwargs: Any) -> JSON: + """Get collaboration oidcissuer. - Get collaboration analytics cleanroompolicy. + Get collaboration oidcissuer. :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. 
- :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -560,8 +544,12 @@ async def analytics_cleanroompolicy_get( # response body for status code(s): 200 response == { - "policy": {}, - "proposalId": "str" + "enabled": bool, + "issuerUrl": "str", + "tenantData": { + "issuerUrl": "str", + "tenantId": "str" + } } # response body for status code(s): 422 response == { @@ -580,24 +568,13 @@ async def analytics_cleanroompolicy_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - - _request = build_collaboration_analytics_cleanroompolicy_get_request( + _request = build_collaboration_oidc_issuer_info_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -632,20 +609,26 @@ async def analytics_cleanroompolicy_get( return cast(JSON, deserialized) # type: ignore - @distributed_trace_async - async def oidc_issuer_info_get( - self, - collaboration_id: str, - body: Optional[Any] = None, - **kwargs: Any) -> JSON: - """Get collaboration oidcissuer. + @overload + async def oidc_set_issuer_url_post( + self, + collaboration_id: str, + body: Optional[JSON] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> JSON: + """Set collaboration oidc issuer url. - Get collaboration oidcissuer. + Set collaboration oidc issuer url. :param collaboration_id: Required. :type collaboration_id: str :param body: Default value is None. - :type body: any + :type body: JSON + :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -653,14 +636,98 @@ async def oidc_issuer_info_get( Example: .. code-block:: python + # JSON input template you can fill out and use as your body input. + body = { + "url": "str" + } + # response body for status code(s): 200 response == { - "enabled": bool, - "issuerUrl": "str", - "tenantData": { - "issuerUrl": "str", - "tenantId": "str" - } + "message": "str", + "url": "str" + } + # response body for status code(s): 422 + response == { + "loc": [ + {} + ], + "msg": "str", + "type": "str" + } + """ + + @overload + async def oidc_set_issuer_url_post( + self, + collaboration_id: str, + body: Optional[IO[bytes]] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> JSON: + """Set collaboration oidc issuer url. + + Set collaboration oidc issuer url. + + :param collaboration_id: Required. + :type collaboration_id: str + :param body: Default value is None. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200 + response == { + "message": "str", + "url": "str" + } + # response body for status code(s): 422 + response == { + "loc": [ + {} + ], + "msg": "str", + "type": "str" + } + """ + + @distributed_trace_async + async def oidc_set_issuer_url_post(self, + collaboration_id: str, + body: Optional[Union[JSON, + IO[bytes]]] = None, + **kwargs: Any) -> JSON: + """Set collaboration oidc issuer url. + + Set collaboration oidc issuer url. + + :param collaboration_id: Required. + :type collaboration_id: str + :param body: Is either a JSON type or a IO[bytes] type. 
Default value is None. + :type body: JSON or IO[bytes] + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # JSON input template you can fill out and use as your body input. + body = { + "url": "str" + } + + # response body for status code(s): 200 + response == { + "message": "str", + "url": "str" } # response body for status code(s): 422 response == { @@ -683,20 +750,26 @@ async def oidc_issuer_info_get( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) + "content_type", _headers.pop("Content-Type", None)) content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body + content_type = content_type or "application/json" if body else None + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = None + if body is not None: + _json = body + else: + _json = None - _request = build_collaboration_oidc_issuer_info_get_request( + _request = build_collaboration_oidc_set_issuer_url_post_request( collaboration_id=collaboration_id, content_type=content_type, json=_json, + content=_content, headers=_headers, params=_params, ) @@ -732,28 +805,41 @@ async def oidc_issuer_info_get( return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def invitations_get( - self, collaboration_id: str, body: Optional[Any] = None, **kwargs: Any - ) -> Union[List[str], JSON]: - """List all invitations. + async def oidc_keys_get( + self, + collaboration_id: str, + **kwargs: Any) -> JSON: + """Get collaboration oidc signing keys (JWKS). - List all invitations. + Get collaboration oidc signing keys (JWKS). :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. 
- :type body: any - :return: list of str or JSON object - :rtype: list[str] or JSON + :return: JSON object + :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python # response body for status code(s): 200 - response == [ - "str" - ] + response == { + "keys": [ + { + "kty": "str", + "alg": "str", + "e": "str", + "kid": "str", + "n": "str", + "use": "str", + "x5c": [ + "str" + ], + "x5t": "str", + "x5t #S256": "str" + } + ] + } # response body for status code(s): 422 response == { "loc": [ @@ -771,24 +857,13 @@ async def invitations_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None - cls: ClsType[Union[List[str], JSON]] = kwargs.pop("cls", None) - - if body is not None: - _json = body - else: - _json = None + cls: ClsType[JSON] = kwargs.pop("cls", None) - _request = build_collaboration_invitations_get_request( + _request = build_collaboration_oidc_keys_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -814,17 +889,108 @@ async def invitations_get( deserialized = None if cls: - return cls(pipeline_response, cast( - Union[List[str], JSON], deserialized), {}) # type: ignore + return cls( + pipeline_response, + cast( + JSON, + deserialized), + {}) # type: ignore - return cast(Union[List[str], JSON], deserialized) # type: ignore + return cast(JSON, deserialized) # type: ignore + + @distributed_trace_async + async def invitations_get( + self, + collaboration_id: str, + *, + pending_only: bool = False, + **kwargs: Any) -> JSON: + """List all invitations. + + List all invitations. + + :param collaboration_id: Required. 
+ :type collaboration_id: str + :keyword pending_only: When true, returns only invitations where the user's status is not + Active. When false or omitted, returns all matching invitations. Default value is False. + :paramtype pending_only: bool + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200 + response == { + "accountType": "str", + "invitationId": "str", + "status": "str" + } + # response body for status code(s): 422 + response == { + "loc": [ + {} + ], + "msg": "str", + "type": "str" + } + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[JSON] = kwargs.pop("cls", None) + + _request = build_collaboration_invitations_get_request( + collaboration_id=collaboration_id, + pending_only=pending_only, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 422]: + map_error( + status_code=response.status_code, + response=response, + error_map=error_map) + raise HttpResponseError(response=response) + + if response.content: + deserialized = response.json() + else: + deserialized = None + + if cls: + return cls( + pipeline_response, + cast( + JSON, + deserialized), + {}) # type: ignore + + return cast(JSON, deserialized) # type: ignore @distributed_trace_async async def invitation_id_get( self, collaboration_id: str, invitation_id: str, - body: Optional[Any] = None, **kwargs: Any) -> 
JSON: """Get invitation by id. @@ -834,8 +1000,6 @@ async def invitation_id_get( :type collaboration_id: str :param invitation_id: Required. :type invitation_id: str - :param body: Default value is None. - :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -872,25 +1036,14 @@ async def invitation_id_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - _request = build_collaboration_invitation_id_get_request( collaboration_id=collaboration_id, invitation_id=invitation_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -926,12 +1079,9 @@ async def invitation_id_get( return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def invitation_id_accept_post(self, - collaboration_id: str, - invitation_id: str, - body: Optional[Any] = None, - **kwargs: Any) -> Union[Any, - JSON]: + async def invitation_id_accept_post( + self, collaboration_id: str, invitation_id: str, **kwargs: Any + ) -> Optional[JSON]: """Accept invitation by id. Accept invitation by id. @@ -940,10 +1090,8 @@ async def invitation_id_accept_post(self, :type collaboration_id: str :param invitation_id: Required. :type invitation_id: str - :param body: Default value is None. 
- :type body: any - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -966,25 +1114,14 @@ async def invitation_id_accept_post(self, } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None - cls: ClsType[Union[Any, JSON]] = kwargs.pop("cls", None) - - if body is not None: - _json = body - else: - _json = None + cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) _request = build_collaboration_invitation_id_accept_post_request( collaboration_id=collaboration_id, invitation_id=invitation_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -997,47 +1134,52 @@ async def invitation_id_accept_post(self, response = pipeline_response.http_response - if response.status_code not in [200, 422]: + if response.status_code not in [204, 422]: map_error( status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if response.content: - deserialized = response.json() - else: - deserialized = None + deserialized = None + if response.status_code == 422: + if response.content: + deserialized = response.json() + else: + deserialized = None if cls: - return cls(pipeline_response, cast( - Union[Any, JSON], deserialized), {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore - return cast(Union[Any, JSON], deserialized) # type: ignore + return deserialized # type: ignore @distributed_trace_async async def analytics_datasets_list_get( - self, collaboration_id: str, body: Optional[Any] = None, **kwargs: Any - ) -> Union[List[str], JSON]: + self, 
collaboration_id: str, **kwargs: Any) -> JSON: """List all datasets. List all datasets. :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. - :type body: any - :return: list of str or JSON object - :rtype: list[str] or JSON + :return: JSON object + :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python # response body for status code(s): 200 - response == [ - "str" - ] + response == { + "value": [ + { + "id": "str", + "labels": { + "str": "str" + } + } + ] + } # response body for status code(s): 422 response == { "loc": [ @@ -1055,24 +1197,13 @@ async def analytics_datasets_list_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None - cls: ClsType[Union[List[str], JSON]] = kwargs.pop("cls", None) - - if body is not None: - _json = body - else: - _json = None + cls: ClsType[JSON] = kwargs.pop("cls", None) _request = build_collaboration_analytics_datasets_list_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -1098,17 +1229,20 @@ async def analytics_datasets_list_get( deserialized = None if cls: - return cls(pipeline_response, cast( - Union[List[str], JSON], deserialized), {}) # type: ignore + return cls( + pipeline_response, + cast( + JSON, + deserialized), + {}) # type: ignore - return cast(Union[List[str], JSON], deserialized) # type: ignore + return cast(JSON, deserialized) # type: ignore @distributed_trace_async async def analytics_dataset_document_id_get( self, collaboration_id: str, document_id: str, - body: Optional[Any] = None, **kwargs: Any) -> JSON: """Get dataset by id. 
@@ -1118,8 +1252,6 @@ async def analytics_dataset_document_id_get( :type collaboration_id: str :param document_id: Required. :type document_id: str - :param body: Default value is None. - :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -1130,77 +1262,6 @@ async def analytics_dataset_document_id_get( # response body for status code(s): 200 response == { "data": { - "datasetAccessPoint": { - "name": "str", - "path": "str", - "protection": { - "proxyMode": "str", - "proxyType": "str", - "configuration": "", - "encryptionSecretAccessIdentity": { - "clientId": "str", - "name": "str", - "tenantId": "str", - "tokenIssuer": {} - }, - "encryptionSecrets": { - "dek": { - "name": "str", - "secret": { - "backingResource": { - "id": "str", - "name": "str", - "provider": { - "protocol": - "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "secretType": "str" - } - }, - "kek": { - "name": "str", - "secret": { - "backingResource": { - "id": "str", - "name": "str", - "provider": { - "protocol": - "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "secretType": "str" - } - } - }, - "privacyPolicy": { - "policy": {} - } - }, - "store": { - "id": "str", - "name": "str", - "provider": { - "protocol": "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "type": "str", - "identity": { - "clientId": "str", - "name": "str", - "tenantId": "str", - "tokenIssuer": {} - } - }, "datasetAccessPolicy": { "accessMode": "str", "allowedFields": [ @@ -1216,7 +1277,30 @@ async def analytics_dataset_document_id_get( ], "format": "str" }, - "name": "str" + "name": "str", + "store": { + "containerName": "str", + "encryptionMode": "str", + "storageAccountType": "str", + "storageAccountUrl": "str", + "awsCgsSecretId": "str" + }, + "dek": { + "keyVaultUrl": "str", + "secretId": "str", + "maaUrl": "str" + }, + "identity": { + "clientId": "str", + "issuerUrl": "str", + "name": "str", + 
"tenantId": "str" + }, + "kek": { + "keyVaultUrl": "str", + "secretId": "str", + "maaUrl": "str" + } }, "id": "str", "proposerId": "str", @@ -1240,25 +1324,14 @@ async def analytics_dataset_document_id_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - _request = build_collaboration_analytics_dataset_document_id_get_request( collaboration_id=collaboration_id, document_id=document_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -1302,7 +1375,7 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Publish dataset by id. Publish dataset by id. @@ -1316,8 +1389,8 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -1325,96 +1398,45 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to # JSON input template you can fill out and use as your body input. 
body = { - "data": { - "datasetAccessPoint": { - "name": "str", - "path": "str", - "protection": { - "proxyMode": "str", - "proxyType": "str", - "configuration": "", - "encryptionSecretAccessIdentity": { - "clientId": "str", - "name": "str", - "tenantId": "str", - "tokenIssuer": {} - }, - "encryptionSecrets": { - "dek": { - "name": "str", - "secret": { - "backingResource": { - "id": "str", - "name": "str", - "provider": { - "protocol": - "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "secretType": "str" - } - }, - "kek": { - "name": "str", - "secret": { - "backingResource": { - "id": "str", - "name": "str", - "provider": { - "protocol": - "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "secretType": "str" - } - } - }, - "privacyPolicy": { - "policy": {} - } - }, - "store": { - "id": "str", - "name": "str", - "provider": { - "protocol": "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "type": "str", - "identity": { - "clientId": "str", - "name": "str", - "tenantId": "str", - "tokenIssuer": {} + "datasetAccessPolicy": { + "accessMode": "str", + "allowedFields": [ + "str" + ] + }, + "datasetSchema": { + "fields": [ + { + "fieldName": "str", + "fieldType": "str" } - }, - "datasetAccessPolicy": { - "accessMode": "str", - "allowedFields": [ - "str" - ] - }, - "datasetSchema": { - "fields": [ - { - "fieldName": "str", - "fieldType": "str" - } - ], - "format": "str" - }, - "name": "str" + ], + "format": "str" }, - "version": "str" + "name": "str", + "store": { + "containerName": "str", + "encryptionMode": "str", + "storageAccountType": "str", + "storageAccountUrl": "str", + "awsCgsSecretId": "str" + }, + "dek": { + "keyVaultUrl": "str", + "secretId": "str", + "maaUrl": "str" + }, + "identity": { + "clientId": "str", + "issuerUrl": "str", + "name": "str", + "tenantId": "str" + }, + "kek": { + "keyVaultUrl": "str", + "secretId": "str", + "maaUrl": "str" + } } # response body for status code(s): 422 @@ 
-1436,7 +1458,7 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Publish dataset by id. Publish dataset by id. @@ -1450,8 +1472,8 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -1470,7 +1492,7 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to @distributed_trace_async async def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long self, collaboration_id: str, document_id: str, body: Union[JSON, IO[bytes]], **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Publish dataset by id. Publish dataset by id. @@ -1481,8 +1503,8 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to :type document_id: str :param body: Is either a JSON type or a IO[bytes] type. Required. :type body: JSON or IO[bytes] - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -1490,96 +1512,45 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to # JSON input template you can fill out and use as your body input. 
body = { - "data": { - "datasetAccessPoint": { - "name": "str", - "path": "str", - "protection": { - "proxyMode": "str", - "proxyType": "str", - "configuration": "", - "encryptionSecretAccessIdentity": { - "clientId": "str", - "name": "str", - "tenantId": "str", - "tokenIssuer": {} - }, - "encryptionSecrets": { - "dek": { - "name": "str", - "secret": { - "backingResource": { - "id": "str", - "name": "str", - "provider": { - "protocol": - "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "secretType": "str" - } - }, - "kek": { - "name": "str", - "secret": { - "backingResource": { - "id": "str", - "name": "str", - "provider": { - "protocol": - "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "secretType": "str" - } - } - }, - "privacyPolicy": { - "policy": {} - } - }, - "store": { - "id": "str", - "name": "str", - "provider": { - "protocol": "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "type": "str", - "identity": { - "clientId": "str", - "name": "str", - "tenantId": "str", - "tokenIssuer": {} + "datasetAccessPolicy": { + "accessMode": "str", + "allowedFields": [ + "str" + ] + }, + "datasetSchema": { + "fields": [ + { + "fieldName": "str", + "fieldType": "str" } - }, - "datasetAccessPolicy": { - "accessMode": "str", - "allowedFields": [ - "str" - ] - }, - "datasetSchema": { - "fields": [ - { - "fieldName": "str", - "fieldType": "str" - } - ], - "format": "str" - }, - "name": "str" + ], + "format": "str" }, - "version": "str" + "name": "str", + "store": { + "containerName": "str", + "encryptionMode": "str", + "storageAccountType": "str", + "storageAccountUrl": "str", + "awsCgsSecretId": "str" + }, + "dek": { + "keyVaultUrl": "str", + "secretId": "str", + "maaUrl": "str" + }, + "identity": { + "clientId": "str", + "issuerUrl": "str", + "name": "str", + "tenantId": "str" + }, + "kek": { + "keyVaultUrl": "str", + "secretId": "str", + "maaUrl": "str" + } } # response body for status code(s): 422 @@ 
-1604,7 +1575,7 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to content_type: Optional[str] = kwargs.pop( "content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Union[Any, JSON]] = kwargs.pop("cls", None) + cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1632,30 +1603,30 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to response = pipeline_response.http_response - if response.status_code not in [200, 422]: + if response.status_code not in [204, 422]: map_error( status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if response.content: - deserialized = response.json() - else: - deserialized = None + deserialized = None + if response.status_code == 422: + if response.content: + deserialized = response.json() + else: + deserialized = None if cls: - return cls(pipeline_response, cast( - Union[Any, JSON], deserialized), {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore - return cast(Union[Any, JSON], deserialized) # type: ignore + return deserialized # type: ignore @distributed_trace_async async def check_consent_document_id_get( self, collaboration_id: str, document_id: str, - body: Optional[Any] = None, **kwargs: Any) -> JSON: """Check execution consent by ID of the Query or the Dataset. @@ -1665,8 +1636,6 @@ async def check_consent_document_id_get( :type collaboration_id: str :param document_id: Required. :type document_id: str - :param body: Default value is None. 
- :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -1699,25 +1668,14 @@ async def check_consent_document_id_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - _request = build_collaboration_check_consent_document_id_get_request( collaboration_id=collaboration_id, document_id=document_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -1752,22 +1710,74 @@ async def check_consent_document_id_get( return cast(JSON, deserialized) # type: ignore - @distributed_trace_async - async def set_consent_document_id_consent_action_post( # pylint: disable=name-too-long - self, collaboration_id: str, document_id: str, consent_action: str, body: Optional[Any] = None, **kwargs: Any + @overload + async def set_consent_document_id_put( + self, + collaboration_id: str, + document_id: str, + body: JSON, + *, + content_type: str = "application/json", + **kwargs: Any ) -> Union[Any, JSON]: - """Set execution consent (accept / reject) by ID of the Query or the Dataset. + """Set execution consent (enable / disable) by ID of the Query or the Dataset. - Set execution consent (accept / reject) by ID of the Query or the Dataset. + Set execution consent (enable / disable) by ID of the Query or the Dataset. :param collaboration_id: Required. :type collaboration_id: str :param document_id: Required. :type document_id: str - :param consent_action: Known values are: "enable" and "disable". Required. - :type consent_action: str - :param body: Default value is None. 
- :type body: any + :param body: Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: any or JSON object + :rtype: any or JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # JSON input template you can fill out and use as your body input. + body = { + "consentAction": "str" + } + + # response body for status code(s): 422 + response == { + "loc": [ + {} + ], + "msg": "str", + "type": "str" + } + """ + + @overload + async def set_consent_document_id_put( + self, + collaboration_id: str, + document_id: str, + body: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> Union[Any, JSON]: + """Set execution consent (enable / disable) by ID of the Query or the Dataset. + + Set execution consent (enable / disable) by ID of the Query or the Dataset. + + :param collaboration_id: Required. + :type collaboration_id: str + :param document_id: Required. + :type document_id: str + :param body: Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :return: any or JSON object :rtype: any or JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -1784,6 +1794,46 @@ async def set_consent_document_id_consent_action_post( # pylint: disable=name-t "type": "str" } """ + + @distributed_trace_async + async def set_consent_document_id_put(self, + collaboration_id: str, + document_id: str, + body: Union[JSON, + IO[bytes]], + **kwargs: Any) -> Union[Any, + JSON]: + """Set execution consent (enable / disable) by ID of the Query or the Dataset. + + Set execution consent (enable / disable) by ID of the Query or the Dataset. + + :param collaboration_id: Required. + :type collaboration_id: str + :param document_id: Required. 
+ :type document_id: str + :param body: Is either a JSON type or a IO[bytes] type. Required. + :type body: JSON or IO[bytes] + :return: any or JSON object + :rtype: any or JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # JSON input template you can fill out and use as your body input. + body = { + "consentAction": "str" + } + + # response body for status code(s): 422 + response == { + "loc": [ + {} + ], + "msg": "str", + "type": "str" + } + """ error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1796,22 +1846,23 @@ async def set_consent_document_id_consent_action_post( # pylint: disable=name-t _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None + "content_type", _headers.pop("Content-Type", None)) cls: ClsType[Union[Any, JSON]] = kwargs.pop("cls", None) - if body is not None: - _json = body + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = None + _json = body - _request = build_collaboration_set_consent_document_id_consent_action_post_request( + _request = build_collaboration_set_consent_document_id_put_request( collaboration_id=collaboration_id, document_id=document_id, - consent_action=consent_action, content_type=content_type, json=_json, + content=_content, headers=_headers, params=_params, ) @@ -1851,7 +1902,7 @@ async def analytics_queries_document_id_publish_post( # pylint: disable=name-to *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Publish query by id. Publish query by id. @@ -1865,8 +1916,8 @@ async def analytics_queries_document_id_publish_post( # pylint: disable=name-to :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -1874,36 +1925,16 @@ async def analytics_queries_document_id_publish_post( # pylint: disable=name-to # JSON input template you can fill out and use as your body input. body = { - "inputDatasets": [ + "inputDatasets": "str", + "outputDataset": "str", + "queryData": [ { - "datasetDocumentId": "str", - "view": "str" + "data": "str", + "executionSequence": 0, + "postFilters": "str", + "preConditions": "str" } - ], - "outputDataset": { - "datasetDocumentId": "str", - "view": "str" - }, - "queryData": { - "segments": [ - { - "data": "str", - "executionSequence": 0, - "postFilters": [ - { - "columnName": "str", - "value": 0 - } - ], - "preConditions": [ - { - "minRowCount": 0, - "viewName": "str" - } - ] - } - ] - } + ] } # response body for status code(s): 422 @@ -1925,7 +1956,7 @@ async def analytics_queries_document_id_publish_post( # pylint: disable=name-to *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Publish query by id. Publish query by id. @@ -1939,8 +1970,8 @@ async def analytics_queries_document_id_publish_post( # pylint: disable=name-to :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -1959,7 +1990,7 @@ async def analytics_queries_document_id_publish_post( # pylint: disable=name-to @distributed_trace_async async def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long self, collaboration_id: str, document_id: str, body: Union[JSON, IO[bytes]], **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Publish query by id. Publish query by id. @@ -1970,8 +2001,8 @@ async def analytics_queries_document_id_publish_post( # pylint: disable=name-to :type document_id: str :param body: Is either a JSON type or a IO[bytes] type. Required. :type body: JSON or IO[bytes] - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -1979,36 +2010,16 @@ async def analytics_queries_document_id_publish_post( # pylint: disable=name-to # JSON input template you can fill out and use as your body input. 
body = { - "inputDatasets": [ + "inputDatasets": "str", + "outputDataset": "str", + "queryData": [ { - "datasetDocumentId": "str", - "view": "str" + "data": "str", + "executionSequence": 0, + "postFilters": "str", + "preConditions": "str" } - ], - "outputDataset": { - "datasetDocumentId": "str", - "view": "str" - }, - "queryData": { - "segments": [ - { - "data": "str", - "executionSequence": 0, - "postFilters": [ - { - "columnName": "str", - "value": 0 - } - ], - "preConditions": [ - { - "minRowCount": 0, - "viewName": "str" - } - ] - } - ] - } + ] } # response body for status code(s): 422 @@ -2033,7 +2044,7 @@ async def analytics_queries_document_id_publish_post( # pylint: disable=name-to content_type: Optional[str] = kwargs.pop( "content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Union[Any, JSON]] = kwargs.pop("cls", None) + cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2061,129 +2072,36 @@ async def analytics_queries_document_id_publish_post( # pylint: disable=name-to response = pipeline_response.http_response - if response.status_code not in [200, 422]: + if response.status_code not in [204, 422]: map_error( status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if response.content: - deserialized = response.json() - else: - deserialized = None + deserialized = None + if response.status_code == 422: + if response.content: + deserialized = response.json() + else: + deserialized = None if cls: - return cls(pipeline_response, cast( - Union[Any, JSON], deserialized), {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore - return cast(Union[Any, JSON], deserialized) # type: ignore + return deserialized # type: ignore @distributed_trace_async async def analytics_queries_list_get( - self, collaboration_id: str, body: Optional[Any] = None, **kwargs: Any - ) -> Union[List[str], JSON]: - 
"""List all queries. - - List all queries. - - :param collaboration_id: Required. - :type collaboration_id: str - :param body: Default value is None. - :type body: any - :return: list of str or JSON object - :rtype: list[str] or JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == [ - "str" - ] - # response body for status code(s): 422 - response == { - "loc": [ - {} - ], - "msg": "str", - "type": "str" - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None - cls: ClsType[Union[List[str], JSON]] = kwargs.pop("cls", None) - - if body is not None: - _json = body - else: - _json = None - - _request = build_collaboration_analytics_queries_list_get_request( - collaboration_id=collaboration_id, - content_type=content_type, - json=_json, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast( - Union[List[str], JSON], deserialized), {}) # type: ignore - - return 
cast(Union[List[str], JSON], deserialized) # type: ignore - - @distributed_trace_async - async def analytics_queries_document_id_get( self, collaboration_id: str, - document_id: str, - body: Optional[Any] = None, **kwargs: Any) -> JSON: - """Get query by id. + """List all queries. - Get query by id. + List all queries. :param collaboration_id: Required. :type collaboration_id: str - :param document_id: Required. - :type document_id: str - :param body: Default value is None. - :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -2193,31 +2111,14 @@ async def analytics_queries_document_id_get( # response body for status code(s): 200 response == { - "approvers": [ + "value": [ { - "approverId": "str", - "approverIdType": "str" - } - ], - "data": { - "applicationType": "str", - "inputDataset": [ - { - "specification": "str", - "view": "str" + "id": "str", + "labels": { + "str": "str" } - ], - "outputDataset": { - "specification": "str", - "view": "str" - }, - "query": "str" - }, - "id": "str", - "proposalId": "str", - "proposerId": "str", - "state": "str", - "version": "str" + } + ] } # response body for status code(s): 422 response == { @@ -2236,25 +2137,13 @@ async def analytics_queries_document_id_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - - _request = build_collaboration_analytics_queries_document_id_get_request( + _request = build_collaboration_analytics_queries_list_get_request( collaboration_id=collaboration_id, - document_id=document_id, - content_type=content_type, - 
json=_json, headers=_headers, params=_params, ) @@ -2289,117 +2178,53 @@ async def analytics_queries_document_id_get( return cast(JSON, deserialized) # type: ignore - @overload - async def analytics_queries_document_id_vote_accept_post( # pylint: disable=name-too-long - self, - collaboration_id: str, - document_id: str, - body: Optional[JSON] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> Union[Any, JSON]: - """Vote accept on query by id. - - Vote accept on query by id. - - :param collaboration_id: Required. - :type collaboration_id: str - :param document_id: Required. - :type document_id: str - :param body: Default value is None. - :type body: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "proposalId": "str" - } - - # response body for status code(s): 422 - response == { - "loc": [ - {} - ], - "msg": "str", - "type": "str" - } - """ - - @overload - async def analytics_queries_document_id_vote_accept_post( # pylint: disable=name-too-long - self, - collaboration_id: str, - document_id: str, - body: Optional[IO[bytes]] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> Union[Any, JSON]: - """Vote accept on query by id. - - Vote accept on query by id. - - :param collaboration_id: Required. - :type collaboration_id: str - :param document_id: Required. - :type document_id: str - :param body: Default value is None. - :type body: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 422 - response == { - "loc": [ - {} - ], - "msg": "str", - "type": "str" - } - """ - @distributed_trace_async - async def analytics_queries_document_id_vote_accept_post( # pylint: disable=name-too-long - self, collaboration_id: str, document_id: str, body: Optional[Union[JSON, IO[bytes]]] = None, **kwargs: Any - ) -> Union[Any, JSON]: - """Vote accept on query by id. + async def analytics_queries_document_id_get( + self, + collaboration_id: str, + document_id: str, + **kwargs: Any) -> JSON: + """Get query by id. - Vote accept on query by id. + Get query by id. :param collaboration_id: Required. :type collaboration_id: str :param document_id: Required. :type document_id: str - :param body: Is either a JSON type or a IO[bytes] type. Default value is None. - :type body: JSON or IO[bytes] - :return: any or JSON object - :rtype: any or JSON + :return: JSON object + :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python - # JSON input template you can fill out and use as your body input. 
- body = { - "proposalId": "str" + # response body for status code(s): 200 + response == { + "approvers": [ + { + "approverId": "str", + "approverIdType": "str" + } + ], + "data": { + "inputDatasets": "str", + "outputDataset": "str", + "queryData": [ + { + "data": "str", + "executionSequence": 0, + "postFilters": "str", + "preConditions": "str" + } + ] + }, + "id": "str", + "proposalId": "str", + "proposerId": "str", + "state": "str", + "version": "str" } - # response body for status code(s): 422 response == { "loc": [ @@ -2417,31 +2242,14 @@ async def analytics_queries_document_id_vote_accept_post( # pylint: disable=nam } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) - content_type = content_type if body else None - cls: ClsType[Union[Any, JSON]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" if body else None - _json = None - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - if body is not None: - _json = body - else: - _json = None + cls: ClsType[JSON] = kwargs.pop("cls", None) - _request = build_collaboration_analytics_queries_document_id_vote_accept_post_request( + _request = build_collaboration_analytics_queries_document_id_get_request( collaboration_id=collaboration_id, document_id=document_id, - content_type=content_type, - json=_json, - content=_content, headers=_headers, params=_params, ) @@ -2467,13 +2275,17 @@ async def analytics_queries_document_id_vote_accept_post( # pylint: disable=nam deserialized = None if cls: - return cls(pipeline_response, cast( - Union[Any, JSON], deserialized), {}) # type: ignore + return cls( + pipeline_response, + cast( + JSON, + deserialized), + {}) # type: ignore - return cast(Union[Any, JSON], 
deserialized) # type: ignore + return cast(JSON, deserialized) # type: ignore @overload - async def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too-long + async def analytics_queries_document_id_vote_post( self, collaboration_id: str, document_id: str, @@ -2481,10 +2293,10 @@ async def analytics_queries_document_id_vote_reject_post( # pylint: disable=nam *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: - """Vote reject on query by id. + ) -> Optional[JSON]: + """Vote on query by id. - Vote reject on query by id. + Vote on query by id. :param collaboration_id: Required. :type collaboration_id: str @@ -2495,8 +2307,8 @@ async def analytics_queries_document_id_vote_reject_post( # pylint: disable=nam :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -2504,7 +2316,8 @@ async def analytics_queries_document_id_vote_reject_post( # pylint: disable=nam # JSON input template you can fill out and use as your body input. body = { - "proposalId": "str" + "proposalId": "str", + "voteAction": "str" } # response body for status code(s): 422 @@ -2518,7 +2331,7 @@ async def analytics_queries_document_id_vote_reject_post( # pylint: disable=nam """ @overload - async def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too-long + async def analytics_queries_document_id_vote_post( self, collaboration_id: str, document_id: str, @@ -2526,10 +2339,10 @@ async def analytics_queries_document_id_vote_reject_post( # pylint: disable=nam *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: - """Vote reject on query by id. + ) -> Optional[JSON]: + """Vote on query by id. - Vote reject on query by id. 
+ Vote on query by id. :param collaboration_id: Required. :type collaboration_id: str @@ -2540,8 +2353,8 @@ async def analytics_queries_document_id_vote_reject_post( # pylint: disable=nam :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -2558,12 +2371,12 @@ async def analytics_queries_document_id_vote_reject_post( # pylint: disable=nam """ @distributed_trace_async - async def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too-long + async def analytics_queries_document_id_vote_post( self, collaboration_id: str, document_id: str, body: Optional[Union[JSON, IO[bytes]]] = None, **kwargs: Any - ) -> Union[Any, JSON]: - """Vote reject on query by id. + ) -> Optional[JSON]: + """Vote on query by id. - Vote reject on query by id. + Vote on query by id. :param collaboration_id: Required. :type collaboration_id: str @@ -2571,8 +2384,8 @@ async def analytics_queries_document_id_vote_reject_post( # pylint: disable=nam :type document_id: str :param body: Is either a JSON type or a IO[bytes] type. Default value is None. :type body: JSON or IO[bytes] - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -2580,7 +2393,8 @@ async def analytics_queries_document_id_vote_reject_post( # pylint: disable=nam # JSON input template you can fill out and use as your body input. 
body = { - "proposalId": "str" + "proposalId": "str", + "voteAction": "str" } # response body for status code(s): 422 @@ -2606,7 +2420,7 @@ async def analytics_queries_document_id_vote_reject_post( # pylint: disable=nam content_type: Optional[str] = kwargs.pop( "content_type", _headers.pop("Content-Type", None)) content_type = content_type if body else None - cls: ClsType[Union[Any, JSON]] = kwargs.pop("cls", None) + cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) content_type = content_type or "application/json" if body else None _json = None @@ -2619,7 +2433,7 @@ async def analytics_queries_document_id_vote_reject_post( # pylint: disable=nam else: _json = None - _request = build_collaboration_analytics_queries_document_id_vote_reject_post_request( + _request = build_collaboration_analytics_queries_document_id_vote_post_request( collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, @@ -2637,23 +2451,24 @@ async def analytics_queries_document_id_vote_reject_post( # pylint: disable=nam response = pipeline_response.http_response - if response.status_code not in [200, 422]: + if response.status_code not in [204, 422]: map_error( status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if response.content: - deserialized = response.json() - else: - deserialized = None + deserialized = None + if response.status_code == 422: + if response.content: + deserialized = response.json() + else: + deserialized = None if cls: - return cls(pipeline_response, cast( - Union[Any, JSON], deserialized), {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore - return cast(Union[Any, JSON], deserialized) # type: ignore + return deserialized # type: ignore @overload async def analytics_queries_document_id_run_post( @@ -2932,18 +2747,15 @@ async def analytics_queries_jobid_get( self, collaboration_id: str, jobid: str, - body: Optional[Any] = None, **kwargs: 
Any) -> JSON: - """Get query run result by run id. + """Get query run result by job id. - Get query run result by run id. + Get query run result by job id. :param collaboration_id: Required. :type collaboration_id: str :param jobid: Required. :type jobid: str - :param body: Default value is None. - :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -2989,25 +2801,14 @@ async def analytics_queries_jobid_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - _request = build_collaboration_analytics_queries_jobid_get_request( collaboration_id=collaboration_id, jobid=jobid, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -3044,7 +2845,7 @@ async def analytics_queries_jobid_get( @distributed_trace_async async def analytics_queries_document_id_runhistory_get( # pylint: disable=name-too-long - self, collaboration_id: str, document_id: str, body: Optional[Any] = None, **kwargs: Any + self, collaboration_id: str, document_id: str, **kwargs: Any ) -> Union[List[JSON], JSON]: """Get query run history by query id. @@ -3054,8 +2855,6 @@ async def analytics_queries_document_id_runhistory_get( # pylint: disable=name- :type collaboration_id: str :param document_id: Required. :type document_id: str - :param body: Default value is None. 
- :type body: any :return: list of JSON object or JSON object :rtype: list[JSON] or JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -3066,9 +2865,48 @@ async def analytics_queries_document_id_runhistory_get( # pylint: disable=name- # response body for status code(s): 200 response == [ { - "data": {}, "queryId": "str", - "runId": "str" + "runs": [ + { + "isSuccessful": bool, + "runId": "str", + "durationSeconds": 0.0, + "endTime": "2020-02-20 00:00:00", + "error": { + "code": "str", + "message": "str" + }, + "startTime": "2020-02-20 00:00:00", + "stats": { + "rowsRead": 0, + "rowsWritten": 0 + } + } + ], + "latestRun": { + "isSuccessful": bool, + "runId": "str", + "durationSeconds": 0.0, + "endTime": "2020-02-20 00:00:00", + "error": { + "code": "str", + "message": "str" + }, + "startTime": "2020-02-20 00:00:00", + "stats": { + "rowsRead": 0, + "rowsWritten": 0 + } + }, + "summary": { + "avgDurationSeconds": 0.0, + "failedRuns": 0, + "successfulRuns": 0, + "totalRowsRead": 0, + "totalRowsWritten": 0, + "totalRuns": 0, + "totalRuntimeSeconds": 0.0 + } } ] # response body for status code(s): 422 @@ -3088,25 +2926,14 @@ async def analytics_queries_document_id_runhistory_get( # pylint: disable=name- } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[Union[List[JSON], JSON]] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - _request = build_collaboration_analytics_queries_document_id_runhistory_get_request( collaboration_id=collaboration_id, document_id=document_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -3138,19 +2965,19 @@ async def 
analytics_queries_document_id_runhistory_get( # pylint: disable=name- return cast(Union[List[JSON], JSON], deserialized) # type: ignore @distributed_trace_async - async def analytics_auditevents_get( - self, collaboration_id: str, body: Optional[Any] = None, **kwargs: Any - ) -> Union[List[JSON], JSON]: - """Get audit events for analytics workload. + async def analytics_datasets_document_id_queries_get( # pylint: disable=name-too-long + self, collaboration_id: str, document_id: str, **kwargs: Any + ) -> Union[List[str], JSON]: + """Get queries by dataset id. - Get audit events for analytics workload. + Get queries by dataset id. :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. - :type body: any - :return: list of JSON object or JSON object - :rtype: list[JSON] or JSON + :param document_id: Required. + :type document_id: str + :return: list of str or JSON object + :rtype: list[str] or JSON :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -3158,13 +2985,7 @@ async def analytics_auditevents_get( # response body for status code(s): 200 response == [ - { - "data": {}, - "id": "str", - "scope": "str", - "timestamp": "str", - "timestampIso": "str" - } + "str" ] # response body for status code(s): 422 response == { @@ -3183,24 +3004,14 @@ async def analytics_auditevents_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None - cls: ClsType[Union[List[JSON], JSON]] = kwargs.pop("cls", None) - - if body is not None: - _json = body - else: - _json = None + cls: ClsType[Union[List[str], JSON]] = kwargs.pop("cls", None) - _request = build_collaboration_analytics_auditevents_get_request( + _request = 
build_collaboration_analytics_datasets_document_id_queries_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, + document_id=document_id, headers=_headers, params=_params, ) @@ -3227,24 +3038,33 @@ async def analytics_auditevents_get( if cls: return cls(pipeline_response, cast( - Union[List[JSON], JSON], deserialized), {}) # type: ignore + Union[List[str], JSON], deserialized), {}) # type: ignore - return cast(Union[List[JSON], JSON], deserialized) # type: ignore + return cast(Union[List[str], JSON], deserialized) # type: ignore - @distributed_trace_async - async def attestationreport_cgs_get( - self, - collaboration_id: str, - body: Optional[Any] = None, - **kwargs: Any) -> JSON: - """Get attestation report from CGS. + @overload + async def analytics_secrets_secret_name_put( + self, + collaboration_id: str, + secret_name: str, + body: Optional[JSON] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> JSON: + """Set secret for analytics workload. - Get attestation report from CGS. + Set secret for analytics workload. :param collaboration_id: Required. :type collaboration_id: str + :param secret_name: Required. + :type secret_name: str :param body: Default value is None. - :type body: any + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -3252,15 +3072,101 @@ async def attestationreport_cgs_get( Example: .. code-block:: python + # JSON input template you can fill out and use as your body input. 
+ body = { + "secretValue": "str" + } + # response body for status code(s): 200 response == { - "platform": "str", - "reportDataPayload": "str", - "report": { - "attestation": "str", - "platformCertificates": "str", - "uvmEndorsements": "str" - } + "secretId": "str" + } + # response body for status code(s): 422 + response == { + "loc": [ + {} + ], + "msg": "str", + "type": "str" + } + """ + + @overload + async def analytics_secrets_secret_name_put( + self, + collaboration_id: str, + secret_name: str, + body: Optional[IO[bytes]] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> JSON: + """Set secret for analytics workload. + + Set secret for analytics workload. + + :param collaboration_id: Required. + :type collaboration_id: str + :param secret_name: Required. + :type secret_name: str + :param body: Default value is None. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200 + response == { + "secretId": "str" + } + # response body for status code(s): 422 + response == { + "loc": [ + {} + ], + "msg": "str", + "type": "str" + } + """ + + @distributed_trace_async + async def analytics_secrets_secret_name_put(self, + collaboration_id: str, + secret_name: str, + body: Optional[Union[JSON, + IO[bytes]]] = None, + **kwargs: Any) -> JSON: + """Set secret for analytics workload. + + Set secret for analytics workload. + + :param collaboration_id: Required. + :type collaboration_id: str + :param secret_name: Required. + :type secret_name: str + :param body: Is either a JSON type or a IO[bytes] type. Default value is None. 
+ :type body: JSON or IO[bytes] + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # JSON input template you can fill out and use as your body input. + body = { + "secretValue": "str" + } + + # response body for status code(s): 200 + response == { + "secretId": "str" } # response body for status code(s): 422 response == { @@ -3283,20 +3189,27 @@ async def attestationreport_cgs_get( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) + "content_type", _headers.pop("Content-Type", None)) content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body + content_type = content_type or "application/json" if body else None + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = None + if body is not None: + _json = body + else: + _json = None - _request = build_collaboration_attestationreport_cgs_get_request( + _request = build_collaboration_analytics_secrets_secret_name_put_request( collaboration_id=collaboration_id, + secret_name=secret_name, content_type=content_type, json=_json, + content=_content, headers=_headers, params=_params, ) @@ -3332,17 +3245,27 @@ async def attestationreport_cgs_get( return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def attestationreport_cleanroom_get( - self, collaboration_id: str, body: Optional[Any] = None, **kwargs: Any + async def analytics_auditevents_get( + self, + collaboration_id: str, + *, + scope: Optional[str] = None, + from_seqno: Optional[str] = None, + to_seqno: Optional[str] = None, + **kwargs: Any ) -> JSON: - """Get attestation report from Cleanroom. + """Get audit events for analytics workload. - Get attestation report from Cleanroom. + Get audit events for analytics workload. 
:param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. - :type body: any + :keyword scope: The event scope to query. Default value is None. + :paramtype scope: str + :keyword from_seqno: Start of the ledger sequence number range. Default value is None. + :paramtype from_seqno: str + :keyword to_seqno: End of the ledger sequence number range. Default value is None. + :paramtype to_seqno: str :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -3352,13 +3275,19 @@ async def attestationreport_cleanroom_get( # response body for status code(s): 200 response == { - "platform": "str", - "reportDataPayload": "str", - "report": { - "attestation": "str", - "platformCertificates": "str", - "uvmEndorsements": "str" - } + "nextLink": "str", + "value": [ + { + "data": { + "message": "str", + "source": "str" + }, + "id": "str", + "scope": "str", + "timestamp": "str", + "timestampIso": "str" + } + ] } # response body for status code(s): 422 response == { @@ -3377,24 +3306,16 @@ async def attestationreport_cleanroom_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - - _request = build_collaboration_attestationreport_cleanroom_get_request( + _request = build_collaboration_analytics_auditevents_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, + scope=scope, + from_seqno=from_seqno, + to_seqno=to_seqno, headers=_headers, params=_params, ) diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/__init__.py 
b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/__init__.py index 2ab6c65218e..91ecd54c4e2 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/__init__.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/__init__.py @@ -4,12 +4,11 @@ # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------- # pylint: disable=wrong-import-position -# flake8: noqa from typing import TYPE_CHECKING if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import + from ._patch import * # pylint: disable=unused-wildcard-import # noqa: F403 from ._operations import CollaborationOperations # type: ignore diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_operations.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_operations.py index ac19c22f997..b1aa1e61d0a 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_operations.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_operations.py @@ -1,10 +1,9 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. 
# -------------------------------------------------------------------------- -# pylint: disable=too-many-lines - from collections.abc import MutableMapping from io import IOBase from typing import Any, Callable, IO, Optional, TypeVar, Union, cast, overload @@ -38,40 +37,38 @@ def build_collaboration_list_request( *, - json: Optional[Any] = None, + active_only: bool = False, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL _url = "/collaborations" + # Construct parameters + if active_only is not None: + _params["activeOnly"] = _SERIALIZER.query( + "active_only", active_only, "bool") + # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest( method="GET", url=_url, + params=_params, headers=_headers, - json=json, **kwargs) def build_collaboration_id_get_request( - collaboration_id: str, *, json: Optional[Any] = None, **kwargs: Any + collaboration_id: str, *, active_only: bool = False, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -82,32 +79,31 @@ def build_collaboration_id_get_request( _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + if active_only is not None: + _params["activeOnly"] = _SERIALIZER.query( + "active_only", active_only, "bool") + # Construct headers - if content_type is not None: - 
_headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest( method="GET", url=_url, + params=_params, headers=_headers, - json=json, **kwargs) -def build_collaboration_workloads_get_request( # pylint: disable=name-too-long - collaboration_id: str, *, json: Optional[Any] = None, **kwargs: Any -) -> HttpRequest: +def build_collaboration_report_get_request( + collaboration_id: str, + **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/workloads" + _url = "/collaborations/{collaboration_id}/report" path_format_arguments = { "collaboration_id": _SERIALIZER.url( "collaboration_id", collaboration_id, "str"), } @@ -115,27 +111,16 @@ def build_collaboration_workloads_get_request( # pylint: disable=name-too-long _url: str = _url.format(**path_format_arguments) # type: ignore # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) def build_collaboration_analytics_get_request( # pylint: disable=name-too-long - collaboration_id: str, *, json: Optional[Any] = None, **kwargs: Any + collaboration_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -147,31 +132,20 @@ def 
build_collaboration_analytics_get_request( # pylint: disable=name-too-long _url: str = _url.format(**path_format_arguments) # type: ignore # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) -def build_collaboration_analytics_deployment_info_get_request( # pylint: disable=name-too-long - collaboration_id: str, *, json: Optional[Any] = None, **kwargs: Any +def build_collaboration_analytics_cleanroompolicy_get_request( # pylint: disable=name-too-long + collaboration_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/deploymentInfo" + _url = "/collaborations/{collaboration_id}/analytics/cleanroompolicy" path_format_arguments = { "collaboration_id": _SERIALIZER.url( "collaboration_id", collaboration_id, "str"), } @@ -179,31 +153,20 @@ def build_collaboration_analytics_deployment_info_get_request( # pylint: disabl _url: str = _url.format(**path_format_arguments) # type: ignore # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) -def build_collaboration_analytics_cleanroompolicy_get_request( # pylint: disable=name-too-long - collaboration_id: str, *, json: 
Optional[Any] = None, **kwargs: Any +def build_collaboration_oidc_issuer_info_get_request( # pylint: disable=name-too-long + collaboration_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/cleanroompolicy" + _url = "/collaborations/{collaboration_id}/oidc/issuerInfo" path_format_arguments = { "collaboration_id": _SERIALIZER.url( "collaboration_id", collaboration_id, "str"), } @@ -211,21 +174,13 @@ def build_collaboration_analytics_cleanroompolicy_get_request( # pylint: disabl _url: str = _url.format(**path_format_arguments) # type: ignore # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) -def build_collaboration_oidc_issuer_info_get_request( # pylint: disable=name-too-long - collaboration_id: str, *, json: Optional[Any] = None, **kwargs: Any +def build_collaboration_oidc_set_issuer_url_post_request( # pylint: disable=name-too-long + collaboration_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) @@ -235,7 +190,7 @@ def build_collaboration_oidc_issuer_info_get_request( # pylint: disable=name-to accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/oidc/issuerInfo" + _url = "/collaborations/{collaboration_id}/oidc/setIssuerUrl" path_format_arguments = { "collaboration_id": _SERIALIZER.url( "collaboration_id", collaboration_id, "str"), } @@ -248,22 +203,36 @@ def 
build_collaboration_oidc_issuer_info_get_request( # pylint: disable=name-to "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + + +def build_collaboration_oidc_keys_get_request( # pylint: disable=name-too-long + collaboration_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/collaborations/{collaboration_id}/oidc/keys" + path_format_arguments = { + "collaboration_id": _SERIALIZER.url( + "collaboration_id", collaboration_id, "str"), } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) def build_collaboration_invitations_get_request( # pylint: disable=name-too-long - collaboration_id: str, *, json: Optional[Any] = None, **kwargs: Any + collaboration_id: str, *, pending_only: bool = False, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -274,28 +243,27 @@ def build_collaboration_invitations_get_request( # pylint: disable=name-too-lon _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + if pending_only is not None: + _params["pendingOnly"] = _SERIALIZER.query( + "pending_only", pending_only, "bool") + # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - 
"content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest( method="GET", url=_url, + params=_params, headers=_headers, - json=json, **kwargs) def build_collaboration_invitation_id_get_request( # pylint: disable=name-too-long - collaboration_id: str, invitation_id: str, *, json: Optional[Any] = None, **kwargs: Any + collaboration_id: str, invitation_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -314,27 +282,16 @@ def build_collaboration_invitation_id_get_request( # pylint: disable=name-too-l _url: str = _url.format(**path_format_arguments) # type: ignore # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) def build_collaboration_invitation_id_accept_post_request( # pylint: disable=name-too-long - collaboration_id: str, invitation_id: str, *, json: Optional[Any] = None, **kwargs: Any + collaboration_id: str, invitation_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -353,27 +310,16 @@ def build_collaboration_invitation_id_accept_post_request( # pylint: disable=na _url: str = _url.format(**path_format_arguments) # type: ignore # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - 
"content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) def build_collaboration_analytics_datasets_list_get_request( # pylint: disable=name-too-long - collaboration_id: str, *, json: Optional[Any] = None, **kwargs: Any + collaboration_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -385,27 +331,16 @@ def build_collaboration_analytics_datasets_list_get_request( # pylint: disable= _url: str = _url.format(**path_format_arguments) # type: ignore # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) def build_collaboration_analytics_dataset_document_id_get_request( # pylint: disable=name-too-long - collaboration_id: str, document_id: str, *, json: Optional[Any] = None, **kwargs: Any + collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -418,17 +353,9 @@ def build_collaboration_analytics_dataset_document_id_get_request( # pylint: di _url: str = _url.format(**path_format_arguments) # type: ignore # Construct headers - if content_type is not None: - 
_headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) def build_collaboration_analytics_dataset_document_id_publish_post_request( # pylint: disable=name-too-long @@ -460,17 +387,14 @@ def build_collaboration_analytics_dataset_document_id_publish_post_request( # p def build_collaboration_check_consent_document_id_get_request( # pylint: disable=name-too-long - collaboration_id: str, document_id: str, *, json: Optional[Any] = None, **kwargs: Any + collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/checkExecutionConsent/{document_id}" + _url = "/collaborations/{collaboration_id}/consent/{document_id}" path_format_arguments = { "collaboration_id": _SERIALIZER.url( "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( @@ -479,21 +403,13 @@ def build_collaboration_check_consent_document_id_get_request( # pylint: disabl _url: str = _url.format(**path_format_arguments) # type: ignore # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) -def build_collaboration_set_consent_document_id_consent_action_post_request( # pylint: disable=name-too-long - collaboration_id: str, document_id: 
str, consent_action: str, *, json: Optional[Any] = None, **kwargs: Any +def build_collaboration_set_consent_document_id_put_request( # pylint: disable=name-too-long + collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) @@ -503,12 +419,11 @@ def build_collaboration_set_consent_document_id_consent_action_post_request( # accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/setExecutionConsent/{document_id}/{consentAction}" + _url = "/collaborations/{collaboration_id}/consent/{document_id}" path_format_arguments = { "collaboration_id": _SERIALIZER.url( "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( - "document_id", document_id, "str"), "consentAction": _SERIALIZER.url( - "consent_action", consent_action, "str"), } + "document_id", document_id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -518,12 +433,7 @@ def build_collaboration_set_consent_document_id_consent_action_post_request( # "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="PUT", url=_url, headers=_headers, **kwargs) def build_collaboration_analytics_queries_document_id_publish_post_request( # pylint: disable=name-too-long @@ -555,13 +465,10 @@ def build_collaboration_analytics_queries_document_id_publish_post_request( # p def build_collaboration_analytics_queries_list_get_request( # pylint: disable=name-too-long - collaboration_id: str, *, json: Optional[Any] = None, **kwargs: Any + collaboration_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = 
_headers.pop("Accept", "application/json") # Construct URL @@ -573,27 +480,16 @@ def build_collaboration_analytics_queries_list_get_request( # pylint: disable=n _url: str = _url.format(**path_format_arguments) # type: ignore # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) def build_collaboration_analytics_queries_document_id_get_request( # pylint: disable=name-too-long - collaboration_id: str, document_id: str, *, json: Optional[Any] = None, **kwargs: Any + collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -606,20 +502,12 @@ def build_collaboration_analytics_queries_document_id_get_request( # pylint: di _url: str = _url.format(**path_format_arguments) # type: ignore # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) -def build_collaboration_analytics_queries_document_id_vote_accept_post_request( # pylint: disable=name-too-long +def build_collaboration_analytics_queries_document_id_vote_post_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) @@ -630,7 
+518,7 @@ def build_collaboration_analytics_queries_document_id_vote_accept_post_request( accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/queries/{document_id}/voteaccept" + _url = "/collaborations/{collaboration_id}/analytics/queries/{document_id}/vote" path_format_arguments = { "collaboration_id": _SERIALIZER.url( "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( @@ -647,7 +535,7 @@ def build_collaboration_analytics_queries_document_id_vote_accept_post_request( return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) -def build_collaboration_analytics_queries_document_id_vote_reject_post_request( # pylint: disable=name-too-long +def build_collaboration_analytics_queries_document_id_run_post_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) @@ -658,7 +546,7 @@ def build_collaboration_analytics_queries_document_id_vote_reject_post_request( accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/queries/{document_id}/votereject" + _url = "/collaborations/{collaboration_id}/analytics/queries/{document_id}/run" path_format_arguments = { "collaboration_id": _SERIALIZER.url( "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( @@ -675,79 +563,59 @@ def build_collaboration_analytics_queries_document_id_vote_reject_post_request( return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) -def build_collaboration_analytics_queries_document_id_run_post_request( # pylint: disable=name-too-long - collaboration_id: str, document_id: str, **kwargs: Any +def build_collaboration_analytics_queries_jobid_get_request( # pylint: disable=name-too-long + collaboration_id: str, jobid: str, **kwargs: Any ) -> HttpRequest: _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/queries/{document_id}/run" + _url = "/collaborations/{collaboration_id}/analytics/runs/{jobid}" path_format_arguments = { "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( - "document_id", document_id, "str"), } + "collaboration_id", collaboration_id, "str"), "jobid": _SERIALIZER.url( + "jobid", jobid, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) -def build_collaboration_analytics_queries_jobid_get_request( # pylint: disable=name-too-long - collaboration_id: str, jobid: str, *, json: Optional[Any] = None, **kwargs: Any +def build_collaboration_analytics_queries_document_id_runhistory_get_request( # pylint: disable=name-too-long + collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/runResult/{jobid}" + _url = "/collaborations/{collaboration_id}/analytics/queries/{document_id}/runs" path_format_arguments = { "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), "jobid": _SERIALIZER.url( - "jobid", jobid, "str"), } + 
"collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( + "document_id", document_id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) -def build_collaboration_analytics_queries_document_id_runhistory_get_request( # pylint: disable=name-too-long - collaboration_id: str, document_id: str, *, json: Optional[Any] = None, **kwargs: Any +def build_collaboration_analytics_datasets_document_id_queries_get_request( # pylint: disable=name-too-long + collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/queries/{document_id}/runHistory" + _url = "/collaborations/{collaboration_id}/analytics/datasets/{document_id}/queries" path_format_arguments = { "collaboration_id": _SERIALIZER.url( "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( @@ -756,21 +624,13 @@ def build_collaboration_analytics_queries_document_id_runhistory_get_request( # _url: str = _url.format(**path_format_arguments) # type: ignore # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="GET", url=_url, 
headers=_headers, **kwargs) -def build_collaboration_analytics_auditevents_get_request( # pylint: disable=name-too-long - collaboration_id: str, *, json: Optional[Any] = None, **kwargs: Any +def build_collaboration_analytics_secrets_secret_name_put_request( # pylint: disable=name-too-long + collaboration_id: str, secret_name: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) @@ -780,10 +640,11 @@ def build_collaboration_analytics_auditevents_get_request( # pylint: disable=na accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/auditevents" + _url = "/collaborations/{collaboration_id}/analytics/secrets/{secret_name}" path_format_arguments = { "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), } + "collaboration_id", collaboration_id, "str"), "secret_name": _SERIALIZER.url( + "secret_name", secret_name, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -793,75 +654,47 @@ def build_collaboration_analytics_auditevents_get_request( # pylint: disable=na "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - headers=_headers, - json=json, - **kwargs) + return HttpRequest(method="PUT", url=_url, headers=_headers, **kwargs) -def build_collaboration_attestationreport_cgs_get_request( # pylint: disable=name-too-long - collaboration_id: str, *, json: Optional[Any] = None, **kwargs: Any +def build_collaboration_analytics_auditevents_get_request( # pylint: disable=name-too-long + collaboration_id: str, + *, + scope: Optional[str] = None, + from_seqno: Optional[str] = None, + to_seqno: Optional[str] = None, + **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = 
kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/attestationreport/cgs" + _url = "/collaborations/{collaboration_id}/analytics/auditevents" path_format_arguments = { "collaboration_id": _SERIALIZER.url( "collaboration_id", collaboration_id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + if scope is not None: + _params["scope"] = _SERIALIZER.query("scope", scope, "str") + if from_seqno is not None: + _params["from_seqno"] = _SERIALIZER.query( + "from_seqno", from_seqno, "str") + if to_seqno is not None: + _params["to_seqno"] = _SERIALIZER.query("to_seqno", to_seqno, "str") + # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest( method="GET", url=_url, + params=_params, headers=_headers, - json=json, - **kwargs) - - -def build_collaboration_attestationreport_cleanroom_get_request( # pylint: disable=name-too-long - collaboration_id: str, *, json: Optional[Any] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/attestationreport/cleanroom" - path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - 
return HttpRequest( - method="GET", - url=_url, - headers=_headers, - json=json, **kwargs) @@ -887,15 +720,16 @@ def __init__(self, *args, **kwargs) -> None: 0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list(self, body: Optional[Any] = None, **kwargs: Any) -> List[str]: + def list(self, *, active_only: bool = False, **kwargs: Any) -> List[JSON]: """List all collaborations. List all collaborations. - :param body: Default value is None. - :type body: any - :return: list of str - :rtype: list[str] + :keyword active_only: When true, returns only active collaborations (email-only lookup). When + false or omitted, returns all collaborations. Default value is False. + :paramtype active_only: bool + :return: list of JSON object + :rtype: list[JSON] :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -903,7 +737,11 @@ def list(self, body: Optional[Any] = None, **kwargs: Any) -> List[str]: # response body for status code(s): 200 response == [ - "str" + { + "collaborationId": "str", + "collaborationName": "str", + "userStatus": "str" + } ] """ error_map: MutableMapping = { @@ -914,23 +752,13 @@ def list(self, body: Optional[Any] = None, **kwargs: Any) -> List[str]: } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None - cls: ClsType[List[str]] = kwargs.pop("cls", None) - - if body is not None: - _json = body - else: - _json = None + cls: ClsType[List[JSON]] = kwargs.pop("cls", None) _request = build_collaboration_list_request( - content_type=content_type, - json=_json, + active_only=active_only, headers=_headers, params=_params, ) @@ -959,17 +787,18 @@ def list(self, body: Optional[Any] = None, **kwargs: Any) -> List[str]: 
return cls( pipeline_response, cast( - List[str], + List[JSON], deserialized), {}) # type: ignore - return cast(List[str], deserialized) # type: ignore + return cast(List[JSON], deserialized) # type: ignore @distributed_trace def id_get( self, collaboration_id: str, - body: Optional[Any] = None, + *, + active_only: bool = False, **kwargs: Any) -> JSON: """Get collaboration by id. @@ -977,8 +806,9 @@ def id_get( :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. - :type body: any + :keyword active_only: When true, queries only the email-based table (active collaborations). + When false or omitted, queries all tables. Default value is False. + :paramtype active_only: bool :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -989,9 +819,7 @@ def id_get( # response body for status code(s): 200 response == { "collaborationId": "str", - "consortiumEndpoint": "str", - "consortiumServiceCertificatePem": "str", - "userEmail": "str", + "collaborationName": "str", "userStatus": "str" } # response body for status code(s): 422 @@ -1011,24 +839,14 @@ def id_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - _request = build_collaboration_id_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, + active_only=active_only, headers=_headers, params=_params, ) @@ -1064,30 +882,56 @@ def id_get( return cast(JSON, deserialized) # type: ignore @distributed_trace - def workloads_get(self, - collaboration_id: str, - body: Optional[Any] = None, - 
**kwargs: Any) -> Union[List[str], - JSON]: - """List all collaboration workloads. + def report_get(self, collaboration_id: str, **kwargs: Any) -> JSON: + """Get collaboration report. - List all collaboration workloads. + Get collaboration report. :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. - :type body: any - :return: list of str or JSON object - :rtype: list[str] or JSON + :return: JSON object + :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python # response body for status code(s): 200 - response == [ - "str" - ] + response == { + "cgs": { + "cgsEndpoint": "str", + "recoveryAgentEndpoint": "str", + "report": { + "platform": "str", + "reportDataPayload": "str", + "report": { + "attestation": "str", + "platformCertificates": "str", + "serviceCert": "str", + "uvmEndorsements": "str" + } + } + }, + "consortiumManager": { + "endpoint": "str", + "report": { + "platform": "str", + "serviceCert": "str", + "hostData": "str", + "report": { + "attestation": "str", + "platformCertificates": "str", + "serviceCert": "str", + "uvmEndorsements": "str" + } + } + } + } + # response body for status code(s): 400 + response == { + "error": "str", + "message": "str" + } # response body for status code(s): 422 response == { "loc": [ @@ -1105,24 +949,13 @@ def workloads_get(self, } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None - cls: ClsType[Union[List[str], JSON]] = kwargs.pop("cls", None) - - if body is not None: - _json = body - else: - _json = None + cls: ClsType[JSON] = kwargs.pop("cls", None) - _request = build_collaboration_workloads_get_request( + _request = 
build_collaboration_report_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -1135,7 +968,7 @@ def workloads_get(self, response = pipeline_response.http_response - if response.status_code not in [200, 422]: + if response.status_code not in [200, 400, 422]: map_error( status_code=response.status_code, response=response, @@ -1148,25 +981,23 @@ def workloads_get(self, deserialized = None if cls: - return cls(pipeline_response, cast( - Union[List[str], JSON], deserialized), {}) # type: ignore + return cls( + pipeline_response, + cast( + JSON, + deserialized), + {}) # type: ignore - return cast(Union[List[str], JSON], deserialized) # type: ignore + return cast(JSON, deserialized) # type: ignore @distributed_trace - def analytics_get( - self, - collaboration_id: str, - body: Optional[Any] = None, - **kwargs: Any) -> JSON: + def analytics_get(self, collaboration_id: str, **kwargs: Any) -> JSON: """Get collaboration analytics workload. Get collaboration analytics workload. :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. 
- :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -1176,7 +1007,7 @@ def analytics_get( # response body for status code(s): 200 response == { - "data": {}, + "data": "str", "id": "str", "state": "str", "version": "str" @@ -1198,24 +1029,13 @@ def analytics_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - _request = build_collaboration_analytics_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -1251,19 +1071,16 @@ def analytics_get( return cast(JSON, deserialized) # type: ignore @distributed_trace - def analytics_deployment_info_get( + def analytics_cleanroompolicy_get( self, collaboration_id: str, - body: Optional[Any] = None, **kwargs: Any) -> JSON: - """Get collaboration analytics deploymentInfo. + """Get collaboration analytics cleanroompolicy. - Get collaboration analytics deploymentInfo. + Get collaboration analytics cleanroompolicy. :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. 
- :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -1273,7 +1090,14 @@ def analytics_deployment_info_get( # response body for status code(s): 200 response == { - "data": {} + "claims": { + "claims": { + "str": {} + } + }, + "proposalIds": [ + "str" + ] } # response body for status code(s): 422 response == { @@ -1292,24 +1116,13 @@ def analytics_deployment_info_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - - _request = build_collaboration_analytics_deployment_info_get_request( + _request = build_collaboration_analytics_cleanroompolicy_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -1345,19 +1158,16 @@ def analytics_deployment_info_get( return cast(JSON, deserialized) # type: ignore @distributed_trace - def analytics_cleanroompolicy_get( + def oidc_issuer_info_get( self, collaboration_id: str, - body: Optional[Any] = None, **kwargs: Any) -> JSON: - """Get collaboration analytics cleanroompolicy. + """Get collaboration oidcissuer. - Get collaboration analytics cleanroompolicy. + Get collaboration oidcissuer. :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. 
- :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -1367,8 +1177,12 @@ def analytics_cleanroompolicy_get( # response body for status code(s): 200 response == { - "policy": {}, - "proposalId": "str" + "enabled": bool, + "issuerUrl": "str", + "tenantData": { + "issuerUrl": "str", + "tenantId": "str" + } } # response body for status code(s): 422 response == { @@ -1387,24 +1201,13 @@ def analytics_cleanroompolicy_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - - _request = build_collaboration_analytics_cleanroompolicy_get_request( + _request = build_collaboration_oidc_issuer_info_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -1439,20 +1242,26 @@ def analytics_cleanroompolicy_get( return cast(JSON, deserialized) # type: ignore - @distributed_trace - def oidc_issuer_info_get( - self, - collaboration_id: str, - body: Optional[Any] = None, - **kwargs: Any) -> JSON: - """Get collaboration oidcissuer. + @overload + def oidc_set_issuer_url_post( + self, + collaboration_id: str, + body: Optional[JSON] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> JSON: + """Set collaboration oidc issuer url. - Get collaboration oidcissuer. + Set collaboration oidc issuer url. :param collaboration_id: Required. :type collaboration_id: str :param body: Default value is None. - :type body: any + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
+ Default value is "application/json". + :paramtype content_type: str :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -1460,14 +1269,98 @@ def oidc_issuer_info_get( Example: .. code-block:: python + # JSON input template you can fill out and use as your body input. + body = { + "url": "str" + } + # response body for status code(s): 200 response == { - "enabled": bool, - "issuerUrl": "str", - "tenantData": { - "issuerUrl": "str", - "tenantId": "str" - } + "message": "str", + "url": "str" + } + # response body for status code(s): 422 + response == { + "loc": [ + {} + ], + "msg": "str", + "type": "str" + } + """ + + @overload + def oidc_set_issuer_url_post( + self, + collaboration_id: str, + body: Optional[IO[bytes]] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> JSON: + """Set collaboration oidc issuer url. + + Set collaboration oidc issuer url. + + :param collaboration_id: Required. + :type collaboration_id: str + :param body: Default value is None. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200 + response == { + "message": "str", + "url": "str" + } + # response body for status code(s): 422 + response == { + "loc": [ + {} + ], + "msg": "str", + "type": "str" + } + """ + + @distributed_trace + def oidc_set_issuer_url_post(self, + collaboration_id: str, + body: Optional[Union[JSON, + IO[bytes]]] = None, + **kwargs: Any) -> JSON: + """Set collaboration oidc issuer url. + + Set collaboration oidc issuer url. + + :param collaboration_id: Required. + :type collaboration_id: str + :param body: Is either a JSON type or a IO[bytes] type. Default value is None. 
+ :type body: JSON or IO[bytes] + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # JSON input template you can fill out and use as your body input. + body = { + "url": "str" + } + + # response body for status code(s): 200 + response == { + "message": "str", + "url": "str" } # response body for status code(s): 422 response == { @@ -1490,20 +1383,26 @@ def oidc_issuer_info_get( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) + "content_type", _headers.pop("Content-Type", None)) content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body + content_type = content_type or "application/json" if body else None + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = None + if body is not None: + _json = body + else: + _json = None - _request = build_collaboration_oidc_issuer_info_get_request( + _request = build_collaboration_oidc_set_issuer_url_post_request( collaboration_id=collaboration_id, content_type=content_type, json=_json, + content=_content, headers=_headers, params=_params, ) @@ -1539,28 +1438,38 @@ def oidc_issuer_info_get( return cast(JSON, deserialized) # type: ignore @distributed_trace - def invitations_get( - self, collaboration_id: str, body: Optional[Any] = None, **kwargs: Any - ) -> Union[List[str], JSON]: - """List all invitations. + def oidc_keys_get(self, collaboration_id: str, **kwargs: Any) -> JSON: + """Get collaboration oidc signing keys (JWKS). - List all invitations. + Get collaboration oidc signing keys (JWKS). :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. 
- :type body: any - :return: list of str or JSON object - :rtype: list[str] or JSON + :return: JSON object + :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python # response body for status code(s): 200 - response == [ - "str" - ] + response == { + "keys": [ + { + "kty": "str", + "alg": "str", + "e": "str", + "kid": "str", + "n": "str", + "use": "str", + "x5c": [ + "str" + ], + "x5t": "str", + "x5t #S256": "str" + } + ] + } # response body for status code(s): 422 response == { "loc": [ @@ -1578,24 +1487,101 @@ def invitations_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None - cls: ClsType[Union[List[str], JSON]] = kwargs.pop("cls", None) + cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body + _request = build_collaboration_oidc_keys_get_request( + collaboration_id=collaboration_id, + headers=_headers, + params=_params, + ) + _request.url = self._client.format_url(_request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 422]: + map_error( + status_code=response.status_code, + response=response, + error_map=error_map) + raise HttpResponseError(response=response) + + if response.content: + deserialized = response.json() else: - _json = None + deserialized = None + + if cls: + return cls( + pipeline_response, + cast( + JSON, + deserialized), + {}) # type: ignore + + return cast(JSON, deserialized) # type: ignore + + @distributed_trace + def invitations_get( + self, + 
collaboration_id: str, + *, + pending_only: bool = False, + **kwargs: Any) -> JSON: + """List all invitations. + + List all invitations. + + :param collaboration_id: Required. + :type collaboration_id: str + :keyword pending_only: When true, returns only invitations where the user's status is not + Active. When false or omitted, returns all matching invitations. Default value is False. + :paramtype pending_only: bool + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200 + response == { + "accountType": "str", + "invitationId": "str", + "status": "str" + } + # response body for status code(s): 422 + response == { + "loc": [ + {} + ], + "msg": "str", + "type": "str" + } + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[JSON] = kwargs.pop("cls", None) _request = build_collaboration_invitations_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, + pending_only=pending_only, headers=_headers, params=_params, ) @@ -1621,17 +1607,20 @@ def invitations_get( deserialized = None if cls: - return cls(pipeline_response, cast( - Union[List[str], JSON], deserialized), {}) # type: ignore + return cls( + pipeline_response, + cast( + JSON, + deserialized), + {}) # type: ignore - return cast(Union[List[str], JSON], deserialized) # type: ignore + return cast(JSON, deserialized) # type: ignore @distributed_trace def invitation_id_get( self, collaboration_id: str, invitation_id: str, - body: Optional[Any] = None, **kwargs: Any) -> JSON: """Get invitation by id. 
@@ -1641,8 +1630,6 @@ def invitation_id_get( :type collaboration_id: str :param invitation_id: Required. :type invitation_id: str - :param body: Default value is None. - :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -1679,25 +1666,14 @@ def invitation_id_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - _request = build_collaboration_invitation_id_get_request( collaboration_id=collaboration_id, invitation_id=invitation_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -1733,12 +1709,11 @@ def invitation_id_get( return cast(JSON, deserialized) # type: ignore @distributed_trace - def invitation_id_accept_post(self, - collaboration_id: str, - invitation_id: str, - body: Optional[Any] = None, - **kwargs: Any) -> Union[Any, - JSON]: + def invitation_id_accept_post( + self, + collaboration_id: str, + invitation_id: str, + **kwargs: Any) -> Optional[JSON]: """Accept invitation by id. Accept invitation by id. @@ -1747,10 +1722,8 @@ def invitation_id_accept_post(self, :type collaboration_id: str :param invitation_id: Required. :type invitation_id: str - :param body: Default value is None. 
- :type body: any - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -1773,25 +1746,14 @@ def invitation_id_accept_post(self, } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None - cls: ClsType[Union[Any, JSON]] = kwargs.pop("cls", None) - - if body is not None: - _json = body - else: - _json = None + cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) _request = build_collaboration_invitation_id_accept_post_request( collaboration_id=collaboration_id, invitation_id=invitation_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -1804,47 +1766,54 @@ def invitation_id_accept_post(self, response = pipeline_response.http_response - if response.status_code not in [200, 422]: + if response.status_code not in [204, 422]: map_error( status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if response.content: - deserialized = response.json() - else: - deserialized = None + deserialized = None + if response.status_code == 422: + if response.content: + deserialized = response.json() + else: + deserialized = None if cls: - return cls(pipeline_response, cast( - Union[Any, JSON], deserialized), {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore - return cast(Union[Any, JSON], deserialized) # type: ignore + return deserialized # type: ignore @distributed_trace def analytics_datasets_list_get( - self, collaboration_id: str, body: Optional[Any] = None, **kwargs: Any - ) -> Union[List[str], JSON]: + self, + collaboration_id: 
str, + **kwargs: Any) -> JSON: """List all datasets. List all datasets. :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. - :type body: any - :return: list of str or JSON object - :rtype: list[str] or JSON + :return: JSON object + :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python # response body for status code(s): 200 - response == [ - "str" - ] + response == { + "value": [ + { + "id": "str", + "labels": { + "str": "str" + } + } + ] + } # response body for status code(s): 422 response == { "loc": [ @@ -1862,24 +1831,13 @@ def analytics_datasets_list_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None - cls: ClsType[Union[List[str], JSON]] = kwargs.pop("cls", None) - - if body is not None: - _json = body - else: - _json = None + cls: ClsType[JSON] = kwargs.pop("cls", None) _request = build_collaboration_analytics_datasets_list_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -1905,17 +1863,20 @@ def analytics_datasets_list_get( deserialized = None if cls: - return cls(pipeline_response, cast( - Union[List[str], JSON], deserialized), {}) # type: ignore + return cls( + pipeline_response, + cast( + JSON, + deserialized), + {}) # type: ignore - return cast(Union[List[str], JSON], deserialized) # type: ignore + return cast(JSON, deserialized) # type: ignore @distributed_trace def analytics_dataset_document_id_get( self, collaboration_id: str, document_id: str, - body: Optional[Any] = None, **kwargs: Any) -> JSON: """Get dataset by id. 
@@ -1925,8 +1886,6 @@ def analytics_dataset_document_id_get( :type collaboration_id: str :param document_id: Required. :type document_id: str - :param body: Default value is None. - :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -1937,77 +1896,6 @@ def analytics_dataset_document_id_get( # response body for status code(s): 200 response == { "data": { - "datasetAccessPoint": { - "name": "str", - "path": "str", - "protection": { - "proxyMode": "str", - "proxyType": "str", - "configuration": "", - "encryptionSecretAccessIdentity": { - "clientId": "str", - "name": "str", - "tenantId": "str", - "tokenIssuer": {} - }, - "encryptionSecrets": { - "dek": { - "name": "str", - "secret": { - "backingResource": { - "id": "str", - "name": "str", - "provider": { - "protocol": - "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "secretType": "str" - } - }, - "kek": { - "name": "str", - "secret": { - "backingResource": { - "id": "str", - "name": "str", - "provider": { - "protocol": - "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "secretType": "str" - } - } - }, - "privacyPolicy": { - "policy": {} - } - }, - "store": { - "id": "str", - "name": "str", - "provider": { - "protocol": "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "type": "str", - "identity": { - "clientId": "str", - "name": "str", - "tenantId": "str", - "tokenIssuer": {} - } - }, "datasetAccessPolicy": { "accessMode": "str", "allowedFields": [ @@ -2023,7 +1911,30 @@ def analytics_dataset_document_id_get( ], "format": "str" }, - "name": "str" + "name": "str", + "store": { + "containerName": "str", + "encryptionMode": "str", + "storageAccountType": "str", + "storageAccountUrl": "str", + "awsCgsSecretId": "str" + }, + "dek": { + "keyVaultUrl": "str", + "secretId": "str", + "maaUrl": "str" + }, + "identity": { + "clientId": "str", + "issuerUrl": "str", + "name": "str", + "tenantId": "str" + }, + 
"kek": { + "keyVaultUrl": "str", + "secretId": "str", + "maaUrl": "str" + } }, "id": "str", "proposerId": "str", @@ -2047,25 +1958,14 @@ def analytics_dataset_document_id_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - _request = build_collaboration_analytics_dataset_document_id_get_request( collaboration_id=collaboration_id, document_id=document_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -2109,7 +2009,7 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Publish dataset by id. Publish dataset by id. @@ -2123,8 +2023,8 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -2132,96 +2032,45 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long # JSON input template you can fill out and use as your body input. 
body = { - "data": { - "datasetAccessPoint": { - "name": "str", - "path": "str", - "protection": { - "proxyMode": "str", - "proxyType": "str", - "configuration": "", - "encryptionSecretAccessIdentity": { - "clientId": "str", - "name": "str", - "tenantId": "str", - "tokenIssuer": {} - }, - "encryptionSecrets": { - "dek": { - "name": "str", - "secret": { - "backingResource": { - "id": "str", - "name": "str", - "provider": { - "protocol": - "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "secretType": "str" - } - }, - "kek": { - "name": "str", - "secret": { - "backingResource": { - "id": "str", - "name": "str", - "provider": { - "protocol": - "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "secretType": "str" - } - } - }, - "privacyPolicy": { - "policy": {} - } - }, - "store": { - "id": "str", - "name": "str", - "provider": { - "protocol": "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "type": "str", - "identity": { - "clientId": "str", - "name": "str", - "tenantId": "str", - "tokenIssuer": {} + "datasetAccessPolicy": { + "accessMode": "str", + "allowedFields": [ + "str" + ] + }, + "datasetSchema": { + "fields": [ + { + "fieldName": "str", + "fieldType": "str" } - }, - "datasetAccessPolicy": { - "accessMode": "str", - "allowedFields": [ - "str" - ] - }, - "datasetSchema": { - "fields": [ - { - "fieldName": "str", - "fieldType": "str" - } - ], - "format": "str" - }, - "name": "str" + ], + "format": "str" }, - "version": "str" + "name": "str", + "store": { + "containerName": "str", + "encryptionMode": "str", + "storageAccountType": "str", + "storageAccountUrl": "str", + "awsCgsSecretId": "str" + }, + "dek": { + "keyVaultUrl": "str", + "secretId": "str", + "maaUrl": "str" + }, + "identity": { + "clientId": "str", + "issuerUrl": "str", + "name": "str", + "tenantId": "str" + }, + "kek": { + "keyVaultUrl": "str", + "secretId": "str", + "maaUrl": "str" + } } # response body for status code(s): 422 @@ 
-2243,7 +2092,7 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Publish dataset by id. Publish dataset by id. @@ -2257,8 +2106,8 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -2277,7 +2126,7 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long @distributed_trace def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long self, collaboration_id: str, document_id: str, body: Union[JSON, IO[bytes]], **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Publish dataset by id. Publish dataset by id. @@ -2288,8 +2137,8 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long :type document_id: str :param body: Is either a JSON type or a IO[bytes] type. Required. :type body: JSON or IO[bytes] - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -2297,96 +2146,45 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long # JSON input template you can fill out and use as your body input. 
body = { - "data": { - "datasetAccessPoint": { - "name": "str", - "path": "str", - "protection": { - "proxyMode": "str", - "proxyType": "str", - "configuration": "", - "encryptionSecretAccessIdentity": { - "clientId": "str", - "name": "str", - "tenantId": "str", - "tokenIssuer": {} - }, - "encryptionSecrets": { - "dek": { - "name": "str", - "secret": { - "backingResource": { - "id": "str", - "name": "str", - "provider": { - "protocol": - "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "secretType": "str" - } - }, - "kek": { - "name": "str", - "secret": { - "backingResource": { - "id": "str", - "name": "str", - "provider": { - "protocol": - "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "secretType": "str" - } - } - }, - "privacyPolicy": { - "policy": {} - } - }, - "store": { - "id": "str", - "name": "str", - "provider": { - "protocol": "str", - "url": "str", - "configuration": "" - }, - "type": "str" - }, - "type": "str", - "identity": { - "clientId": "str", - "name": "str", - "tenantId": "str", - "tokenIssuer": {} + "datasetAccessPolicy": { + "accessMode": "str", + "allowedFields": [ + "str" + ] + }, + "datasetSchema": { + "fields": [ + { + "fieldName": "str", + "fieldType": "str" } - }, - "datasetAccessPolicy": { - "accessMode": "str", - "allowedFields": [ - "str" - ] - }, - "datasetSchema": { - "fields": [ - { - "fieldName": "str", - "fieldType": "str" - } - ], - "format": "str" - }, - "name": "str" + ], + "format": "str" }, - "version": "str" + "name": "str", + "store": { + "containerName": "str", + "encryptionMode": "str", + "storageAccountType": "str", + "storageAccountUrl": "str", + "awsCgsSecretId": "str" + }, + "dek": { + "keyVaultUrl": "str", + "secretId": "str", + "maaUrl": "str" + }, + "identity": { + "clientId": "str", + "issuerUrl": "str", + "name": "str", + "tenantId": "str" + }, + "kek": { + "keyVaultUrl": "str", + "secretId": "str", + "maaUrl": "str" + } } # response body for status code(s): 422 @@ 
-2411,7 +2209,7 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long content_type: Optional[str] = kwargs.pop( "content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Union[Any, JSON]] = kwargs.pop("cls", None) + cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2439,30 +2237,30 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long response = pipeline_response.http_response - if response.status_code not in [200, 422]: + if response.status_code not in [204, 422]: map_error( status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if response.content: - deserialized = response.json() - else: - deserialized = None + deserialized = None + if response.status_code == 422: + if response.content: + deserialized = response.json() + else: + deserialized = None if cls: - return cls(pipeline_response, cast( - Union[Any, JSON], deserialized), {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore - return cast(Union[Any, JSON], deserialized) # type: ignore + return deserialized # type: ignore @distributed_trace def check_consent_document_id_get( self, collaboration_id: str, document_id: str, - body: Optional[Any] = None, **kwargs: Any) -> JSON: """Check execution consent by ID of the Query or the Dataset. @@ -2472,8 +2270,6 @@ def check_consent_document_id_get( :type collaboration_id: str :param document_id: Required. :type document_id: str - :param body: Default value is None. 
- :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -2506,25 +2302,14 @@ def check_consent_document_id_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - _request = build_collaboration_check_consent_document_id_get_request( collaboration_id=collaboration_id, document_id=document_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -2559,22 +2344,29 @@ def check_consent_document_id_get( return cast(JSON, deserialized) # type: ignore - @distributed_trace - def set_consent_document_id_consent_action_post( # pylint: disable=name-too-long - self, collaboration_id: str, document_id: str, consent_action: str, body: Optional[Any] = None, **kwargs: Any + @overload + def set_consent_document_id_put( + self, + collaboration_id: str, + document_id: str, + body: JSON, + *, + content_type: str = "application/json", + **kwargs: Any ) -> Union[Any, JSON]: - """Set execution consent (accept / reject) by ID of the Query or the Dataset. + """Set execution consent (enable / disable) by ID of the Query or the Dataset. - Set execution consent (accept / reject) by ID of the Query or the Dataset. + Set execution consent (enable / disable) by ID of the Query or the Dataset. :param collaboration_id: Required. :type collaboration_id: str :param document_id: Required. :type document_id: str - :param consent_action: Known values are: "enable" and "disable". Required. - :type consent_action: str - :param body: Default value is None. - :type body: any + :param body: Required. 
+ :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :return: any or JSON object :rtype: any or JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -2582,6 +2374,91 @@ def set_consent_document_id_consent_action_post( # pylint: disable=name-too-lon Example: .. code-block:: python + # JSON input template you can fill out and use as your body input. + body = { + "consentAction": "str" + } + + # response body for status code(s): 422 + response == { + "loc": [ + {} + ], + "msg": "str", + "type": "str" + } + """ + + @overload + def set_consent_document_id_put( + self, + collaboration_id: str, + document_id: str, + body: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> Union[Any, JSON]: + """Set execution consent (enable / disable) by ID of the Query or the Dataset. + + Set execution consent (enable / disable) by ID of the Query or the Dataset. + + :param collaboration_id: Required. + :type collaboration_id: str + :param document_id: Required. + :type document_id: str + :param body: Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: any or JSON object + :rtype: any or JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 422 + response == { + "loc": [ + {} + ], + "msg": "str", + "type": "str" + } + """ + + @distributed_trace + def set_consent_document_id_put(self, + collaboration_id: str, + document_id: str, + body: Union[JSON, + IO[bytes]], + **kwargs: Any) -> Union[Any, + JSON]: + """Set execution consent (enable / disable) by ID of the Query or the Dataset. + + Set execution consent (enable / disable) by ID of the Query or the Dataset. 
+ + :param collaboration_id: Required. + :type collaboration_id: str + :param document_id: Required. + :type document_id: str + :param body: Is either a JSON type or a IO[bytes] type. Required. + :type body: JSON or IO[bytes] + :return: any or JSON object + :rtype: any or JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # JSON input template you can fill out and use as your body input. + body = { + "consentAction": "str" + } + # response body for status code(s): 422 response == { "loc": [ @@ -2603,22 +2480,23 @@ def set_consent_document_id_consent_action_post( # pylint: disable=name-too-lon _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None + "content_type", _headers.pop("Content-Type", None)) cls: ClsType[Union[Any, JSON]] = kwargs.pop("cls", None) - if body is not None: - _json = body + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = None + _json = body - _request = build_collaboration_set_consent_document_id_consent_action_post_request( + _request = build_collaboration_set_consent_document_id_put_request( collaboration_id=collaboration_id, document_id=document_id, - consent_action=consent_action, content_type=content_type, json=_json, + content=_content, headers=_headers, params=_params, ) @@ -2658,7 +2536,7 @@ def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Publish query by id. Publish query by id. @@ -2672,8 +2550,8 @@ def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
Default value is "application/json". :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -2681,36 +2559,16 @@ def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long # JSON input template you can fill out and use as your body input. body = { - "inputDatasets": [ + "inputDatasets": "str", + "outputDataset": "str", + "queryData": [ { - "datasetDocumentId": "str", - "view": "str" + "data": "str", + "executionSequence": 0, + "postFilters": "str", + "preConditions": "str" } - ], - "outputDataset": { - "datasetDocumentId": "str", - "view": "str" - }, - "queryData": { - "segments": [ - { - "data": "str", - "executionSequence": 0, - "postFilters": [ - { - "columnName": "str", - "value": 0 - } - ], - "preConditions": [ - { - "minRowCount": 0, - "viewName": "str" - } - ] - } - ] - } + ] } # response body for status code(s): 422 @@ -2732,7 +2590,7 @@ def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Publish query by id. Publish query by id. @@ -2746,8 +2604,8 @@ def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". 
:paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -2766,7 +2624,7 @@ def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long @distributed_trace def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long self, collaboration_id: str, document_id: str, body: Union[JSON, IO[bytes]], **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Publish query by id. Publish query by id. @@ -2777,8 +2635,8 @@ def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long :type document_id: str :param body: Is either a JSON type or a IO[bytes] type. Required. :type body: JSON or IO[bytes] - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -2786,36 +2644,16 @@ def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long # JSON input template you can fill out and use as your body input. 
body = { - "inputDatasets": [ + "inputDatasets": "str", + "outputDataset": "str", + "queryData": [ { - "datasetDocumentId": "str", - "view": "str" + "data": "str", + "executionSequence": 0, + "postFilters": "str", + "preConditions": "str" } - ], - "outputDataset": { - "datasetDocumentId": "str", - "view": "str" - }, - "queryData": { - "segments": [ - { - "data": "str", - "executionSequence": 0, - "postFilters": [ - { - "columnName": "str", - "value": 0 - } - ], - "preConditions": [ - { - "minRowCount": 0, - "viewName": "str" - } - ] - } - ] - } + ] } # response body for status code(s): 422 @@ -2840,7 +2678,7 @@ def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long content_type: Optional[str] = kwargs.pop( "content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Union[Any, JSON]] = kwargs.pop("cls", None) + cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2868,129 +2706,36 @@ def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long response = pipeline_response.http_response - if response.status_code not in [200, 422]: + if response.status_code not in [204, 422]: map_error( status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if response.content: - deserialized = response.json() - else: - deserialized = None + deserialized = None + if response.status_code == 422: + if response.content: + deserialized = response.json() + else: + deserialized = None if cls: - return cls(pipeline_response, cast( - Union[Any, JSON], deserialized), {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore - return cast(Union[Any, JSON], deserialized) # type: ignore + return deserialized # type: ignore @distributed_trace def analytics_queries_list_get( - self, collaboration_id: str, body: Optional[Any] = None, **kwargs: Any - ) -> Union[List[str], JSON]: - """List all 
queries. - - List all queries. - - :param collaboration_id: Required. - :type collaboration_id: str - :param body: Default value is None. - :type body: any - :return: list of str or JSON object - :rtype: list[str] or JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == [ - "str" - ] - # response body for status code(s): 422 - response == { - "loc": [ - {} - ], - "msg": "str", - "type": "str" - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None - cls: ClsType[Union[List[str], JSON]] = kwargs.pop("cls", None) - - if body is not None: - _json = body - else: - _json = None - - _request = build_collaboration_analytics_queries_list_get_request( - collaboration_id=collaboration_id, - content_type=content_type, - json=_json, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast( - Union[List[str], JSON], deserialized), {}) # type: ignore - - return 
cast(Union[List[str], JSON], deserialized) # type: ignore - - @distributed_trace - def analytics_queries_document_id_get( self, collaboration_id: str, - document_id: str, - body: Optional[Any] = None, **kwargs: Any) -> JSON: - """Get query by id. + """List all queries. - Get query by id. + List all queries. :param collaboration_id: Required. :type collaboration_id: str - :param document_id: Required. - :type document_id: str - :param body: Default value is None. - :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -3000,31 +2745,14 @@ def analytics_queries_document_id_get( # response body for status code(s): 200 response == { - "approvers": [ + "value": [ { - "approverId": "str", - "approverIdType": "str" - } - ], - "data": { - "applicationType": "str", - "inputDataset": [ - { - "specification": "str", - "view": "str" + "id": "str", + "labels": { + "str": "str" } - ], - "outputDataset": { - "specification": "str", - "view": "str" - }, - "query": "str" - }, - "id": "str", - "proposalId": "str", - "proposerId": "str", - "state": "str", - "version": "str" + } + ] } # response body for status code(s): 422 response == { @@ -3043,25 +2771,13 @@ def analytics_queries_document_id_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - - _request = build_collaboration_analytics_queries_document_id_get_request( + _request = build_collaboration_analytics_queries_list_get_request( collaboration_id=collaboration_id, - document_id=document_id, - content_type=content_type, - json=_json, headers=_headers, 
params=_params, ) @@ -3096,117 +2812,53 @@ def analytics_queries_document_id_get( return cast(JSON, deserialized) # type: ignore - @overload - def analytics_queries_document_id_vote_accept_post( # pylint: disable=name-too-long - self, - collaboration_id: str, - document_id: str, - body: Optional[JSON] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> Union[Any, JSON]: - """Vote accept on query by id. - - Vote accept on query by id. - - :param collaboration_id: Required. - :type collaboration_id: str - :param document_id: Required. - :type document_id: str - :param body: Default value is None. - :type body: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "proposalId": "str" - } - - # response body for status code(s): 422 - response == { - "loc": [ - {} - ], - "msg": "str", - "type": "str" - } - """ - - @overload - def analytics_queries_document_id_vote_accept_post( # pylint: disable=name-too-long - self, - collaboration_id: str, - document_id: str, - body: Optional[IO[bytes]] = None, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> Union[Any, JSON]: - """Vote accept on query by id. - - Vote accept on query by id. - - :param collaboration_id: Required. - :type collaboration_id: str - :param document_id: Required. - :type document_id: str - :param body: Default value is None. - :type body: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. 
code-block:: python - - # response body for status code(s): 422 - response == { - "loc": [ - {} - ], - "msg": "str", - "type": "str" - } - """ - @distributed_trace - def analytics_queries_document_id_vote_accept_post( # pylint: disable=name-too-long - self, collaboration_id: str, document_id: str, body: Optional[Union[JSON, IO[bytes]]] = None, **kwargs: Any - ) -> Union[Any, JSON]: - """Vote accept on query by id. + def analytics_queries_document_id_get( + self, + collaboration_id: str, + document_id: str, + **kwargs: Any) -> JSON: + """Get query by id. - Vote accept on query by id. + Get query by id. :param collaboration_id: Required. :type collaboration_id: str :param document_id: Required. :type document_id: str - :param body: Is either a JSON type or a IO[bytes] type. Default value is None. - :type body: JSON or IO[bytes] - :return: any or JSON object - :rtype: any or JSON + :return: JSON object + :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python - # JSON input template you can fill out and use as your body input. 
- body = { - "proposalId": "str" + # response body for status code(s): 200 + response == { + "approvers": [ + { + "approverId": "str", + "approverIdType": "str" + } + ], + "data": { + "inputDatasets": "str", + "outputDataset": "str", + "queryData": [ + { + "data": "str", + "executionSequence": 0, + "postFilters": "str", + "preConditions": "str" + } + ] + }, + "id": "str", + "proposalId": "str", + "proposerId": "str", + "state": "str", + "version": "str" } - # response body for status code(s): 422 response == { "loc": [ @@ -3224,31 +2876,14 @@ def analytics_queries_document_id_vote_accept_post( # pylint: disable=name-too- } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) - content_type = content_type if body else None - cls: ClsType[Union[Any, JSON]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" if body else None - _json = None - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - if body is not None: - _json = body - else: - _json = None + cls: ClsType[JSON] = kwargs.pop("cls", None) - _request = build_collaboration_analytics_queries_document_id_vote_accept_post_request( + _request = build_collaboration_analytics_queries_document_id_get_request( collaboration_id=collaboration_id, document_id=document_id, - content_type=content_type, - json=_json, - content=_content, headers=_headers, params=_params, ) @@ -3274,13 +2909,17 @@ def analytics_queries_document_id_vote_accept_post( # pylint: disable=name-too- deserialized = None if cls: - return cls(pipeline_response, cast( - Union[Any, JSON], deserialized), {}) # type: ignore + return cls( + pipeline_response, + cast( + JSON, + deserialized), + {}) # type: ignore - return cast(Union[Any, JSON], 
deserialized) # type: ignore + return cast(JSON, deserialized) # type: ignore @overload - def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too-long + def analytics_queries_document_id_vote_post( self, collaboration_id: str, document_id: str, @@ -3288,10 +2927,10 @@ def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too- *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: - """Vote reject on query by id. + ) -> Optional[JSON]: + """Vote on query by id. - Vote reject on query by id. + Vote on query by id. :param collaboration_id: Required. :type collaboration_id: str @@ -3302,8 +2941,8 @@ def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too- :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -3311,7 +2950,8 @@ def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too- # JSON input template you can fill out and use as your body input. body = { - "proposalId": "str" + "proposalId": "str", + "voteAction": "str" } # response body for status code(s): 422 @@ -3325,7 +2965,7 @@ def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too- """ @overload - def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too-long + def analytics_queries_document_id_vote_post( self, collaboration_id: str, document_id: str, @@ -3333,10 +2973,10 @@ def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too- *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: - """Vote reject on query by id. + ) -> Optional[JSON]: + """Vote on query by id. - Vote reject on query by id. + Vote on query by id. 
:param collaboration_id: Required. :type collaboration_id: str @@ -3347,8 +2987,8 @@ def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too- :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -3365,12 +3005,15 @@ def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too- """ @distributed_trace - def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too-long - self, collaboration_id: str, document_id: str, body: Optional[Union[JSON, IO[bytes]]] = None, **kwargs: Any - ) -> Union[Any, JSON]: - """Vote reject on query by id. + def analytics_queries_document_id_vote_post(self, + collaboration_id: str, + document_id: str, + body: Optional[Union[JSON, + IO[bytes]]] = None, + **kwargs: Any) -> Optional[JSON]: + """Vote on query by id. - Vote reject on query by id. + Vote on query by id. :param collaboration_id: Required. :type collaboration_id: str @@ -3378,8 +3021,8 @@ def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too- :type document_id: str :param body: Is either a JSON type or a IO[bytes] type. Default value is None. :type body: JSON or IO[bytes] - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -3387,7 +3030,8 @@ def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too- # JSON input template you can fill out and use as your body input. 
body = { - "proposalId": "str" + "proposalId": "str", + "voteAction": "str" } # response body for status code(s): 422 @@ -3413,7 +3057,7 @@ def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too- content_type: Optional[str] = kwargs.pop( "content_type", _headers.pop("Content-Type", None)) content_type = content_type if body else None - cls: ClsType[Union[Any, JSON]] = kwargs.pop("cls", None) + cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) content_type = content_type or "application/json" if body else None _json = None @@ -3426,7 +3070,7 @@ def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too- else: _json = None - _request = build_collaboration_analytics_queries_document_id_vote_reject_post_request( + _request = build_collaboration_analytics_queries_document_id_vote_post_request( collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, @@ -3444,23 +3088,24 @@ def analytics_queries_document_id_vote_reject_post( # pylint: disable=name-too- response = pipeline_response.http_response - if response.status_code not in [200, 422]: + if response.status_code not in [204, 422]: map_error( status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if response.content: - deserialized = response.json() - else: - deserialized = None + deserialized = None + if response.status_code == 422: + if response.content: + deserialized = response.json() + else: + deserialized = None if cls: - return cls(pipeline_response, cast( - Union[Any, JSON], deserialized), {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore - return cast(Union[Any, JSON], deserialized) # type: ignore + return deserialized # type: ignore @overload def analytics_queries_document_id_run_post( @@ -3739,18 +3384,15 @@ def analytics_queries_jobid_get( self, collaboration_id: str, jobid: str, - body: Optional[Any] = None, **kwargs: Any) -> JSON: - 
"""Get query run result by run id. + """Get query run result by job id. - Get query run result by run id. + Get query run result by job id. :param collaboration_id: Required. :type collaboration_id: str :param jobid: Required. :type jobid: str - :param body: Default value is None. - :type body: any :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -3796,25 +3438,14 @@ def analytics_queries_jobid_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - _request = build_collaboration_analytics_queries_jobid_get_request( collaboration_id=collaboration_id, jobid=jobid, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -3851,7 +3482,7 @@ def analytics_queries_jobid_get( @distributed_trace def analytics_queries_document_id_runhistory_get( # pylint: disable=name-too-long - self, collaboration_id: str, document_id: str, body: Optional[Any] = None, **kwargs: Any + self, collaboration_id: str, document_id: str, **kwargs: Any ) -> Union[List[JSON], JSON]: """Get query run history by query id. @@ -3861,8 +3492,6 @@ def analytics_queries_document_id_runhistory_get( # pylint: disable=name-too-lo :type collaboration_id: str :param document_id: Required. :type document_id: str - :param body: Default value is None. 
- :type body: any :return: list of JSON object or JSON object :rtype: list[JSON] or JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -3873,9 +3502,48 @@ def analytics_queries_document_id_runhistory_get( # pylint: disable=name-too-lo # response body for status code(s): 200 response == [ { - "data": {}, "queryId": "str", - "runId": "str" + "runs": [ + { + "isSuccessful": bool, + "runId": "str", + "durationSeconds": 0.0, + "endTime": "2020-02-20 00:00:00", + "error": { + "code": "str", + "message": "str" + }, + "startTime": "2020-02-20 00:00:00", + "stats": { + "rowsRead": 0, + "rowsWritten": 0 + } + } + ], + "latestRun": { + "isSuccessful": bool, + "runId": "str", + "durationSeconds": 0.0, + "endTime": "2020-02-20 00:00:00", + "error": { + "code": "str", + "message": "str" + }, + "startTime": "2020-02-20 00:00:00", + "stats": { + "rowsRead": 0, + "rowsWritten": 0 + } + }, + "summary": { + "avgDurationSeconds": 0.0, + "failedRuns": 0, + "successfulRuns": 0, + "totalRowsRead": 0, + "totalRowsWritten": 0, + "totalRuns": 0, + "totalRuntimeSeconds": 0.0 + } } ] # response body for status code(s): 422 @@ -3895,25 +3563,14 @@ def analytics_queries_document_id_runhistory_get( # pylint: disable=name-too-lo } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[Union[List[JSON], JSON]] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - _request = build_collaboration_analytics_queries_document_id_runhistory_get_request( collaboration_id=collaboration_id, document_id=document_id, - content_type=content_type, - json=_json, headers=_headers, params=_params, ) @@ -3945,19 +3602,19 @@ def 
analytics_queries_document_id_runhistory_get( # pylint: disable=name-too-lo return cast(Union[List[JSON], JSON], deserialized) # type: ignore @distributed_trace - def analytics_auditevents_get( - self, collaboration_id: str, body: Optional[Any] = None, **kwargs: Any - ) -> Union[List[JSON], JSON]: - """Get audit events for analytics workload. + def analytics_datasets_document_id_queries_get( # pylint: disable=name-too-long + self, collaboration_id: str, document_id: str, **kwargs: Any + ) -> Union[List[str], JSON]: + """Get queries by dataset id. - Get audit events for analytics workload. + Get queries by dataset id. :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. - :type body: any - :return: list of JSON object or JSON object - :rtype: list[JSON] or JSON + :param document_id: Required. + :type document_id: str + :return: list of str or JSON object + :rtype: list[str] or JSON :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -3965,13 +3622,7 @@ def analytics_auditevents_get( # response body for status code(s): 200 response == [ - { - "data": {}, - "id": "str", - "scope": "str", - "timestamp": "str", - "timestampIso": "str" - } + "str" ] # response body for status code(s): 422 response == { @@ -3990,24 +3641,14 @@ def analytics_auditevents_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None - cls: ClsType[Union[List[JSON], JSON]] = kwargs.pop("cls", None) - - if body is not None: - _json = body - else: - _json = None + cls: ClsType[Union[List[str], JSON]] = kwargs.pop("cls", None) - _request = build_collaboration_analytics_auditevents_get_request( + _request = 
build_collaboration_analytics_datasets_document_id_queries_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, + document_id=document_id, headers=_headers, params=_params, ) @@ -4034,24 +3675,33 @@ def analytics_auditevents_get( if cls: return cls(pipeline_response, cast( - Union[List[JSON], JSON], deserialized), {}) # type: ignore + Union[List[str], JSON], deserialized), {}) # type: ignore - return cast(Union[List[JSON], JSON], deserialized) # type: ignore + return cast(Union[List[str], JSON], deserialized) # type: ignore - @distributed_trace - def attestationreport_cgs_get( - self, - collaboration_id: str, - body: Optional[Any] = None, - **kwargs: Any) -> JSON: - """Get attestation report from CGS. + @overload + def analytics_secrets_secret_name_put( + self, + collaboration_id: str, + secret_name: str, + body: Optional[JSON] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> JSON: + """Set secret for analytics workload. - Get attestation report from CGS. + Set secret for analytics workload. :param collaboration_id: Required. :type collaboration_id: str + :param secret_name: Required. + :type secret_name: str :param body: Default value is None. - :type body: any + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -4059,15 +3709,101 @@ def attestationreport_cgs_get( Example: .. code-block:: python + # JSON input template you can fill out and use as your body input. 
+ body = { + "secretValue": "str" + } + # response body for status code(s): 200 response == { - "platform": "str", - "reportDataPayload": "str", - "report": { - "attestation": "str", - "platformCertificates": "str", - "uvmEndorsements": "str" - } + "secretId": "str" + } + # response body for status code(s): 422 + response == { + "loc": [ + {} + ], + "msg": "str", + "type": "str" + } + """ + + @overload + def analytics_secrets_secret_name_put( + self, + collaboration_id: str, + secret_name: str, + body: Optional[IO[bytes]] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> JSON: + """Set secret for analytics workload. + + Set secret for analytics workload. + + :param collaboration_id: Required. + :type collaboration_id: str + :param secret_name: Required. + :type secret_name: str + :param body: Default value is None. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200 + response == { + "secretId": "str" + } + # response body for status code(s): 422 + response == { + "loc": [ + {} + ], + "msg": "str", + "type": "str" + } + """ + + @distributed_trace + def analytics_secrets_secret_name_put(self, + collaboration_id: str, + secret_name: str, + body: Optional[Union[JSON, + IO[bytes]]] = None, + **kwargs: Any) -> JSON: + """Set secret for analytics workload. + + Set secret for analytics workload. + + :param collaboration_id: Required. + :type collaboration_id: str + :param secret_name: Required. + :type secret_name: str + :param body: Is either a JSON type or a IO[bytes] type. Default value is None. + :type body: JSON or IO[bytes] + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. 
code-block:: python + + # JSON input template you can fill out and use as your body input. + body = { + "secretValue": "str" + } + + # response body for status code(s): 200 + response == { + "secretId": "str" } # response body for status code(s): 422 response == { @@ -4090,20 +3826,27 @@ def attestationreport_cgs_get( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) + "content_type", _headers.pop("Content-Type", None)) content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body + content_type = content_type or "application/json" if body else None + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = None + if body is not None: + _json = body + else: + _json = None - _request = build_collaboration_attestationreport_cgs_get_request( + _request = build_collaboration_analytics_secrets_secret_name_put_request( collaboration_id=collaboration_id, + secret_name=secret_name, content_type=content_type, json=_json, + content=_content, headers=_headers, params=_params, ) @@ -4139,19 +3882,27 @@ def attestationreport_cgs_get( return cast(JSON, deserialized) # type: ignore @distributed_trace - def attestationreport_cleanroom_get( - self, - collaboration_id: str, - body: Optional[Any] = None, - **kwargs: Any) -> JSON: - """Get attestation report from Cleanroom. + def analytics_auditevents_get( + self, + collaboration_id: str, + *, + scope: Optional[str] = None, + from_seqno: Optional[str] = None, + to_seqno: Optional[str] = None, + **kwargs: Any + ) -> JSON: + """Get audit events for analytics workload. - Get attestation report from Cleanroom. + Get audit events for analytics workload. :param collaboration_id: Required. :type collaboration_id: str - :param body: Default value is None. - :type body: any + :keyword scope: The event scope to query. 
Default value is None. + :paramtype scope: str + :keyword from_seqno: Start of the ledger sequence number range. Default value is None. + :paramtype from_seqno: str + :keyword to_seqno: End of the ledger sequence number range. Default value is None. + :paramtype to_seqno: str :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -4161,13 +3912,19 @@ def attestationreport_cleanroom_get( # response body for status code(s): 200 response == { - "platform": "str", - "reportDataPayload": "str", - "report": { - "attestation": "str", - "platformCertificates": "str", - "uvmEndorsements": "str" - } + "nextLink": "str", + "value": [ + { + "data": { + "message": "str", + "source": "str" + }, + "id": "str", + "scope": "str", + "timestamp": "str", + "timestampIso": "str" + } + ] } # response body for status code(s): 422 response == { @@ -4186,24 +3943,16 @@ def attestationreport_cleanroom_get( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", "application/json")) - content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) - if body is not None: - _json = body - else: - _json = None - - _request = build_collaboration_attestationreport_cleanroom_get_request( + _request = build_collaboration_analytics_auditevents_get_request( collaboration_id=collaboration_id, - content_type=content_type, - json=_json, + scope=scope, + from_seqno=from_seqno, + to_seqno=to_seqno, headers=_headers, params=_params, ) diff --git a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_collaboration.py b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_collaboration.py index de1124110aa..5bd0613897d 100644 --- 
a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_collaboration.py +++ b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_collaboration.py @@ -13,11 +13,13 @@ from azext_managedcleanroom._frontend_custom import ( frontend_collaboration_list, frontend_collaboration_show, - frontend_collaboration_workloads_list, frontend_collaboration_analytics_show, - frontend_collaboration_analytics_deploymentinfo, frontend_collaboration_analytics_cleanroompolicy, - frontend_collaboration_oidc_issuerinfo_show + frontend_collaboration_oidc_issuerinfo_show, + frontend_collaboration_oidc_set_issuer_url, + frontend_collaboration_oidc_keys_show, + frontend_collaboration_report_show, + frontend_collaboration_dataset_queries_list ) from azext_managedcleanroom.tests.latest.test_utils import ( MOCK_COLLABORATION, @@ -83,7 +85,7 @@ def test_show_collaboration_success(self, mock_get_client): self.assertEqual(result["collaborationId"], "test-collab-123") self.assertEqual(result["name"], "Test Collaboration") mock_client.collaboration.id_get.assert_called_once_with( - "test-collab-123") + "test-collab-123", active_only=False) @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') def test_show_collaboration_not_found(self, mock_get_client): @@ -103,32 +105,6 @@ def test_show_collaboration_not_found(self, mock_get_client): self.assertIn("not found", str(context.exception)) - # Workloads Tests - - @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') - def test_list_workloads_success(self, mock_get_client): - """Test listing workloads for a collaboration""" - # Mock workloads response - mock_workloads = [ - {"workloadId": "workload-1", "name": "Workload 1"}, - {"workloadId": "workload-2", "name": "Workload 2"} - ] - mock_client = Mock() - mock_client.collaboration.workloads_get.return_value = mock_workloads - mock_get_client.return_value = mock_client - - # Execute - result = frontend_collaboration_workloads_list( - 
cmd=Mock(), - collaboration_id="test-collab-123" - ) - - # Verify - self.assertEqual(len(result), 2) - self.assertEqual(result[0]["workloadId"], "workload-1") - mock_client.collaboration.workloads_get.assert_called_once_with( - "test-collab-123") - # Analytics Tests @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') @@ -151,31 +127,6 @@ def test_show_analytics(self, mock_get_client): mock_client.collaboration.analytics_get.assert_called_once_with( "test-collab-123") - @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') - def test_show_analytics_deployment_info(self, mock_get_client): - """Test showing analytics deployment information""" - # Mock deployment info - mock_deployment_info = { - "deploymentId": "deploy-123", - "region": "eastus", - "status": "deployed" - } - mock_client = Mock() - mock_client.collaboration.analytics_deployment_info_get.return_value = mock_deployment_info - mock_get_client.return_value = mock_client - - # Execute - result = frontend_collaboration_analytics_deploymentinfo( - cmd=Mock(), - collaboration_id="test-collab-123" - ) - - # Verify - self.assertEqual(result["deploymentId"], "deploy-123") - self.assertEqual(result["region"], "eastus") - mock_client.collaboration.analytics_deployment_info_get.assert_called_once_with( - "test-collab-123") - @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') def test_show_analytics_cleanroom_policy(self, mock_get_client): """Test showing cleanroom policy""" @@ -225,6 +176,159 @@ def test_show_oidc_issuer_info(self, mock_get_client): mock_client.collaboration.oidc_issuer_info_get.assert_called_once_with( "test-collab-123") + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_oidc_set_issuer_url(self, mock_get_client): + """Test setting OIDC issuer URL""" + # Mock OIDC response + mock_oidc = { + "issuer": "https://new-issuer.example.com", + "updatedAt": "2024-01-01T00:00:00Z" + } + mock_client = Mock() + 
mock_client.collaboration.oidc_set_issuer_url_post.return_value = mock_oidc + mock_get_client.return_value = mock_client + + # Execute + result = frontend_collaboration_oidc_set_issuer_url( + cmd=Mock(), + collaboration_id="test-collab-123", + url="https://new-issuer.example.com" + ) + + # Verify + self.assertEqual(result["issuer"], "https://new-issuer.example.com") + mock_client.collaboration.oidc_set_issuer_url_post.assert_called_once_with( + "test-collab-123", body={"url": "https://new-issuer.example.com"}) + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_oidc_keys_show(self, mock_get_client): + """Test showing OIDC keys""" + # Mock OIDC keys response + mock_keys = { + "keys": [ + {"kid": "key1", "kty": "RSA", "use": "sig"}, + {"kid": "key2", "kty": "RSA", "use": "sig"} + ] + } + mock_client = Mock() + mock_client.collaboration.oidc_keys_get.return_value = mock_keys + mock_get_client.return_value = mock_client + + # Execute + result = frontend_collaboration_oidc_keys_show( + cmd=Mock(), + collaboration_id="test-collab-123" + ) + + # Verify + self.assertEqual(len(result["keys"]), 2) + self.assertEqual(result["keys"][0]["kid"], "key1") + mock_client.collaboration.oidc_keys_get.assert_called_once_with( + "test-collab-123") + + # Report Tests + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_show_collaboration_report(self, mock_get_client): + """Test showing collaboration report""" + # Mock report response + mock_report = { + "collaborationId": "test-collab-123", + "reportData": { + "totalQueries": 42, + "totalDatasets": 10, + "participants": 5 + }, + "generatedAt": "2024-01-01T00:00:00Z" + } + mock_client = Mock() + mock_client.collaboration.report_get.return_value = mock_report + mock_get_client.return_value = mock_client + + # Execute + result = frontend_collaboration_report_show( + cmd=Mock(), + collaboration_id="test-collab-123" + ) + + # Verify + self.assertEqual(result["collaborationId"], 
"test-collab-123") + self.assertEqual(result["reportData"]["totalQueries"], 42) + mock_client.collaboration.report_get.assert_called_once_with( + "test-collab-123") + + # Dataset Queries Tests + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_dataset_queries_list(self, mock_get_client): + """Test listing queries for a dataset""" + # Mock queries list response + mock_queries = [ + {"queryId": "query-1", "datasetId": "dataset-123", "name": "Query 1"}, + {"queryId": "query-2", "datasetId": "dataset-123", "name": "Query 2"} + ] + mock_client = Mock() + mock_client.collaboration.analytics_datasets_document_id_queries_get.return_value = mock_queries + mock_get_client.return_value = mock_client + + # Execute + result = frontend_collaboration_dataset_queries_list( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="dataset-123" + ) + + # Verify + self.assertEqual(len(result), 2) + self.assertEqual(result[0]["queryId"], "query-1") + self.assertEqual(result[1]["queryId"], "query-2") + mock_client.collaboration.analytics_datasets_document_id_queries_get.assert_called_once_with( + "test-collab-123", "dataset-123") + + # Filter Tests + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_list_collaborations_with_active_only_filter(self, mock_get_client): + """Test listing collaborations with active_only filter""" + # Mock the client + mock_client = Mock() + mock_client.collaboration.list.return_value = [ + {"collaborationId": "collab-1", "name": "Active Collab 1", "status": "active"} + ] + mock_get_client.return_value = mock_client + + # Execute + result = frontend_collaboration_list( + cmd=Mock(), + active_only=True + ) + + # Verify + self.assertEqual(len(result), 1) + self.assertEqual(result[0]["status"], "active") + mock_client.collaboration.list.assert_called_once_with(active_only=True) + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def 
test_show_collaboration_with_active_only_filter(self, mock_get_client): + """Test showing collaboration with active_only check""" + # Mock the client + mock_client = Mock() + mock_client.collaboration.id_get.return_value = MOCK_COLLABORATION + mock_get_client.return_value = mock_client + + # Execute + result = frontend_collaboration_show( + cmd=Mock(), + collaboration_id="test-collab-123", + active_only=True + ) + + # Verify + self.assertEqual(result["collaborationId"], "test-collab-123") + mock_client.collaboration.id_get.assert_called_once_with( + "test-collab-123", active_only=True) + + if __name__ == '__main__': unittest.main() diff --git a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_misc.py b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_misc.py index 613ee1886ce..81647f9d4e3 100644 --- a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_misc.py +++ b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_misc.py @@ -17,8 +17,7 @@ frontend_collaboration_consent_check, frontend_collaboration_consent_set, frontend_collaboration_audit_list, - frontend_collaboration_attestation_cgs, - frontend_collaboration_attestation_cleanroom + frontend_collaboration_analytics_secret_set ) @@ -59,7 +58,7 @@ def test_list_invitations(self, mock_get_client): self.assertEqual(result[0]["invitationId"], "invite-1") self.assertEqual(result[1]["invitationId"], "invite-2") mock_client.collaboration.invitations_get.assert_called_once_with( - "test-collab-123") + "test-collab-123", pending_only=False) @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') def test_show_invitation(self, mock_get_client): @@ -145,7 +144,7 @@ def test_set_consent(self, mock_get_client): """Test setting consent action""" # Mock the client and its method chain mock_client = Mock() - mock_client.collaboration.set_consent_document_id_consent_action_post.return_value = { + 
mock_client.collaboration.set_consent_document_id_put.return_value = { "documentId": "doc-123", "action": "enable", "updatedAt": "2024-01-01T00:00:00Z"} mock_get_client.return_value = mock_client @@ -159,8 +158,8 @@ def test_set_consent(self, mock_get_client): # Verify self.assertEqual(result["action"], "enable") - mock_client.collaboration.set_consent_document_id_consent_action_post.assert_called_once_with( - "test-collab-123", "doc-123", "enable") + mock_client.collaboration.set_consent_document_id_put.assert_called_once_with( + "test-collab-123", "doc-123", body={"consentAction": "enable"}) # Audit Test @@ -196,57 +195,101 @@ def test_list_audit_logs(self, mock_get_client): self.assertEqual(result[0]["logId"], "test-log-123") self.assertEqual(result[1]["logId"], "log-456") mock_client.collaboration.analytics_auditevents_get.assert_called_once_with( - "test-collab-123") + "test-collab-123", scope=None, from_seqno=None, to_seqno=None) - # Attestation Tests + # Analytics Secret Tests @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') - def test_get_attestation_cgs(self, mock_get_client): - """Test getting CGS attestation""" - # Mock the client and its method chain + def test_analytics_secret_set(self, mock_get_client): + """Test setting analytics secret""" + # Mock the client mock_client = Mock() - mock_client.collaboration.attestationreport_cgs_get.return_value = { - "attestationType": "cgs", - "token": "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9...", - "issuedAt": "2024-01-01T00:00:00Z" + mock_client.collaboration.analytics_secrets_secret_name_put.return_value = { + "secretName": "test-secret", + "status": "set", + "updatedAt": "2024-01-01T00:00:00Z" } mock_get_client.return_value = mock_client # Execute - result = frontend_collaboration_attestation_cgs( + result = frontend_collaboration_analytics_secret_set( cmd=Mock(), - collaboration_id="test-collab-123" + collaboration_id="test-collab-123", + secret_name="test-secret", + 
secret_value="secret-value-123" ) # Verify - self.assertEqual(result["attestationType"], "cgs") - self.assertIn("token", result) - mock_client.collaboration.attestationreport_cgs_get.assert_called_once_with( - "test-collab-123") + self.assertEqual(result["secretName"], "test-secret") + self.assertEqual(result["status"], "set") + mock_client.collaboration.analytics_secrets_secret_name_put.assert_called_once_with( + "test-collab-123", "test-secret", body={"secretValue": "secret-value-123"}) + + # Invitation Filter Tests @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') - def test_get_attestation_cleanroom(self, mock_get_client): - """Test getting cleanroom attestation""" - # Mock the client and its method chain + def test_list_invitations_with_pending_only_filter(self, mock_get_client): + """Test listing invitations with pending_only filter""" + # Mock the client mock_client = Mock() - mock_client.collaboration.attestationreport_cleanroom_get.return_value = { - "attestationType": "cleanroom", - "token": "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9...", - "issuedAt": "2024-01-01T00:00:00Z", - "cleanroomId": "cleanroom-123"} + mock_client.collaboration.invitations_get.return_value = [ + { + "invitationId": "invite-1", + "collaborationId": "test-collab-123", + "inviteeEmail": "user1@example.com", + "status": "pending" + } + ] mock_get_client.return_value = mock_client # Execute - result = frontend_collaboration_attestation_cleanroom( + result = frontend_collaboration_invitation_list( cmd=Mock(), - collaboration_id="test-collab-123" + collaboration_id="test-collab-123", + pending_only=True + ) + + # Verify + self.assertEqual(len(result), 1) + self.assertEqual(result[0]["status"], "pending") + mock_client.collaboration.invitations_get.assert_called_once_with( + "test-collab-123", pending_only=True) + + # Audit Filter Tests + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_list_audit_with_filters(self, mock_get_client): + 
"""Test listing audit logs with filters""" + # Mock the client + mock_client = Mock() + mock_client.collaboration.analytics_auditevents_get.return_value = [ + { + "logId": "test-log-123", + "timestamp": "2024-01-01T00:00:00Z", + "action": "query_executed", + "userId": "user-123" + } + ] + mock_get_client.return_value = mock_client + + # Execute + result = frontend_collaboration_audit_list( + cmd=Mock(), + collaboration_id="test-collab-123", + scope="analytics", + from_seqno=100, + to_seqno=200 ) # Verify - self.assertEqual(result["attestationType"], "cleanroom") - self.assertIn("cleanroomId", result) - mock_client.collaboration.attestationreport_cleanroom_get.assert_called_once_with( - "test-collab-123") + self.assertEqual(len(result), 1) + self.assertEqual(result[0]["action"], "query_executed") + mock_client.collaboration.analytics_auditevents_get.assert_called_once_with( + "test-collab-123", + scope="analytics", + from_seqno=100, + to_seqno=200) + if __name__ == '__main__': diff --git a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py index e66fe5c3f01..f55829faf48 100644 --- a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py +++ b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py @@ -15,8 +15,7 @@ frontend_collaboration_query_show, frontend_collaboration_query_publish, frontend_collaboration_query_run, - frontend_collaboration_query_vote_accept, - frontend_collaboration_query_vote_reject, + frontend_collaboration_query_vote, frontend_collaboration_query_runhistory_list, frontend_collaboration_query_runresult_show ) @@ -156,42 +155,42 @@ def test_vote_accept_query(self, mock_get_client): """Test accepting a query vote""" # Mock the client and its method chain mock_client = Mock() - mock_client.collaboration.analytics_queries_document_id_vote_accept_post.return_value = { - "queryId": 
"test-query-123", "voteStatus": "accepted", "votedAt": "2024-01-01T00:00:00Z"} + mock_client.collaboration.analytics_queries_document_id_vote_post.return_value = None # 204 No Content mock_get_client.return_value = mock_client # Execute - result = frontend_collaboration_query_vote_accept( + result = frontend_collaboration_query_vote( cmd=Mock(), collaboration_id="test-collab-123", - document_id="test-query-123" + document_id="test-query-123", + vote_action="accept" ) # Verify - self.assertEqual(result["voteStatus"], "accepted") - mock_client.collaboration.analytics_queries_document_id_vote_accept_post.assert_called_once_with( - "test-collab-123", "test-query-123", body=None) + self.assertIsNone(result) # 204 No Content returns None + mock_client.collaboration.analytics_queries_document_id_vote_post.assert_called_once_with( + "test-collab-123", "test-query-123", body={"voteAction": "accept"}) @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') def test_vote_reject_query(self, mock_get_client): """Test rejecting a query vote""" # Mock the client and its method chain mock_client = Mock() - mock_client.collaboration.analytics_queries_document_id_vote_reject_post.return_value = { - "queryId": "test-query-123", "voteStatus": "rejected", "votedAt": "2024-01-01T00:00:00Z"} + mock_client.collaboration.analytics_queries_document_id_vote_post.return_value = None # 204 No Content mock_get_client.return_value = mock_client # Execute - result = frontend_collaboration_query_vote_reject( + result = frontend_collaboration_query_vote( cmd=Mock(), collaboration_id="test-collab-123", - document_id="test-query-123" + document_id="test-query-123", + vote_action="reject" ) # Verify - self.assertEqual(result["voteStatus"], "rejected") - mock_client.collaboration.analytics_queries_document_id_vote_reject_post.assert_called_once_with( - "test-collab-123", "test-query-123", body=None) + self.assertIsNone(result) # 204 No Content returns None + 
mock_client.collaboration.analytics_queries_document_id_vote_post.assert_called_once_with( + "test-collab-123", "test-query-123", body={"voteAction": "reject"}) # Query Run History Tests diff --git a/src/managedcleanroom/setup.py b/src/managedcleanroom/setup.py index 482f23f5044..1c7d7a36b43 100644 --- a/src/managedcleanroom/setup.py +++ b/src/managedcleanroom/setup.py @@ -10,7 +10,7 @@ # HISTORY.rst entry. -VERSION = '1.0.0b2' +VERSION = '1.0.0b4' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers From 1e675144ecf6370ad31e37a2849ceb2f6a8d1ebd Mon Sep 17 00:00:00 2001 From: Saksham Garg Date: Wed, 18 Mar 2026 12:25:32 +0530 Subject: [PATCH 02/10] update ManagedCleanroom frontend cmdlets --- src/managedcleanroom/HISTORY.rst | 5 +- .../azext_managedcleanroom/_frontend_auth.py | 15 +- .../_frontend_commands.py | 8 +- .../_frontend_custom.py | 185 ++++++--- .../azext_managedcleanroom/_help.py | 6 + .../azext_managedcleanroom/_params.py | 11 + .../analytics_frontend_api/__init__.py | 1 + .../analytics_frontend_api/_client.py | 5 +- .../analytics_frontend_api/_configuration.py | 6 + .../analytics_frontend_api/_utils/__init__.py | 1 - .../_utils/serialization.py | 3 +- .../analytics_frontend_api/aio/_client.py | 5 +- .../aio/_configuration.py | 6 + .../aio/operations/_operations.py | 26 ++ .../operations/_operations.py | 376 +++++++++++++++++- .../latest/test_frontend_collaboration.py | 7 +- .../tests/latest/test_frontend_misc.py | 6 +- .../tests/latest/test_frontend_query.py | 6 +- 18 files changed, 581 insertions(+), 97 deletions(-) diff --git a/src/managedcleanroom/HISTORY.rst b/src/managedcleanroom/HISTORY.rst index 32e1436f3a8..74c24f72780 100644 --- a/src/managedcleanroom/HISTORY.rst +++ b/src/managedcleanroom/HISTORY.rst @@ -5,6 +5,10 @@ Release History 1.0.0b4 +++++++ +* Regenerated analytics_frontend_api SDK from V2026_03_01_Preview OpenAPI specification +* BREAKING CHANGE: All frontend API endpoints now 
require api-version=2026-03-01-preview query parameter +* Added: --api-version parameter to all frontend commands (default: 2026-03-01-preview) +* Updated: SDK client now automatically injects api-version into all API requests * BREAKING CHANGE: Removed deprecated commands (APIs no longer supported in SDK): - `az managedcleanroom frontend workloads list` - `az managedcleanroom frontend analytics deploymentinfo` @@ -24,7 +28,6 @@ Release History * Updated: Added --pending-only filter to invitation list command * Updated: Added --scope, --from-seqno, --to-seqno filters to audit event list command * Updated: Response structures modernized (many list endpoints now return structured objects with value arrays) -* Updated: Regenerated frontend SDK from OpenAPI spec to support new APIs 1.0.0b3 +++++++ diff --git a/src/managedcleanroom/azext_managedcleanroom/_frontend_auth.py b/src/managedcleanroom/azext_managedcleanroom/_frontend_auth.py index 5b7ba8e7929..138dce170f1 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_frontend_auth.py +++ b/src/managedcleanroom/azext_managedcleanroom/_frontend_auth.py @@ -30,10 +30,12 @@ def get_frontend_token(cmd): profile = Profile(cli_ctx=cmd.cli_ctx) subscription = get_subscription_id(cmd.cli_ctx) - # Priority 0: explicit token via environment variable (for local/test envs only) + # Priority 0: explicit token via environment variable (for local/test envs + # only) env_token = os.environ.get('MANAGEDCLEANROOM_ACCESS_TOKEN') if env_token: - logger.warning("Using token from MANAGEDCLEANROOM_ACCESS_TOKEN env var FOR TESTING PURPOSES ONLY") + logger.warning( + "Using token from MANAGEDCLEANROOM_ACCESS_TOKEN env var FOR TESTING PURPOSES ONLY") from collections import namedtuple AccessToken = namedtuple('AccessToken', ['token', 'expires_on']) token_obj = AccessToken(token=env_token, expires_on=0) @@ -87,7 +89,7 @@ def set_frontend_config(cmd, endpoint): endpoint) -def get_frontend_client(cmd, endpoint=None): +def 
get_frontend_client(cmd, endpoint=None, api_version=None): """Create Analytics Frontend API client with Azure authentication Uses Profile.get_raw_token() to fetch access token from Azure context. @@ -96,12 +98,18 @@ def get_frontend_client(cmd, endpoint=None): :param cmd: CLI command context :param endpoint: Optional explicit endpoint URL (overrides config) :type endpoint: str + :param api_version: Optional API version (defaults to 2026-03-01-preview) + :type api_version: str :return: Configured AnalyticsFrontendAPI client :raises: CLIError if token fetch fails or endpoint not configured """ from .analytics_frontend_api import AnalyticsFrontendAPI from azure.core.pipeline.policies import BearerTokenCredentialPolicy, SansIOHTTPPolicy + # Use provided api_version or default + if api_version is None: + api_version = '2026-03-01-preview' + api_endpoint = endpoint or get_frontend_config(cmd) if not api_endpoint: raise CLIError( @@ -172,5 +180,6 @@ def on_request(self, request): # Return configured client return AnalyticsFrontendAPI( endpoint=api_endpoint, + api_version=api_version, authentication_policy=auth_policy ) diff --git a/src/managedcleanroom/azext_managedcleanroom/_frontend_commands.py b/src/managedcleanroom/azext_managedcleanroom/_frontend_commands.py index 702ba71dc9c..540c0617bbd 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_frontend_commands.py +++ b/src/managedcleanroom/azext_managedcleanroom/_frontend_commands.py @@ -37,7 +37,9 @@ def load_frontend_command_table(loader, _): # OIDC commands with loader.command_group('managedcleanroom frontend oidc', custom_command_type=frontend_custom) as g: - g.custom_command('set-issuer-url', 'frontend_collaboration_oidc_set_issuer_url') + g.custom_command( + 'set-issuer-url', + 'frontend_collaboration_oidc_set_issuer_url') g.custom_show_command('keys', 'frontend_collaboration_oidc_keys_show') # Keep issuerinfo for backwards compatibility @@ -56,7 +58,9 @@ def load_frontend_command_table(loader, _): 
g.custom_command('list', 'frontend_collaboration_dataset_list') g.custom_show_command('show', 'frontend_collaboration_dataset_show') g.custom_command('publish', 'frontend_collaboration_dataset_publish') - g.custom_command('queries', 'frontend_collaboration_dataset_queries_list') + g.custom_command( + 'queries', + 'frontend_collaboration_dataset_queries_list') # Consent commands with loader.command_group('managedcleanroom frontend consent', custom_command_type=frontend_custom) as g: diff --git a/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py b/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py index b6bde9e5470..40f1a870924 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py +++ b/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py @@ -16,30 +16,38 @@ # Base Collaboration Commands # ============================================================================ -def frontend_collaboration_list(cmd, active_only=False): +def frontend_collaboration_list(cmd, active_only=False, api_version=None): """List all collaborations :param cmd: CLI command context :param active_only: When true, returns only active collaborations (default: False) + :param api_version: API version to use for this request :return: List of collaboration objects with collaborationId, collaborationName, userStatus """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.list(active_only=active_only) -def frontend_collaboration_show(cmd, collaboration_id, active_only=False): +def frontend_collaboration_show( + cmd, + collaboration_id, + active_only=False, + api_version=None): """Show collaboration details :param cmd: CLI command context :param collaboration_id: Collaboration identifier :param active_only: When true, queries only active collaborations (default: False) + :param api_version: API version to use for this request :return: Collaboration details """ - client = 
get_frontend_client(cmd) - return client.collaboration.id_get(collaboration_id, active_only=active_only) + client = get_frontend_client(cmd, api_version=api_version) + return client.collaboration.id_get( + collaboration_id, active_only=active_only) -def frontend_collaboration_report_show(cmd, collaboration_id): +def frontend_collaboration_report_show( + cmd, collaboration_id, api_version=None): """Get collaboration report (comprehensive attestation report) Replaces the deprecated attestation cgs and cleanroom commands. @@ -47,9 +55,10 @@ def frontend_collaboration_report_show(cmd, collaboration_id): :param cmd: CLI command context :param collaboration_id: Collaboration identifier + :param api_version: API version to use for this request :return: Collaboration report with cgs and consortiumManager attestation details """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.report_get(collaboration_id) @@ -57,25 +66,29 @@ def frontend_collaboration_report_show(cmd, collaboration_id): # Analytics Commands # ============================================================================ -def frontend_collaboration_analytics_show(cmd, collaboration_id): +def frontend_collaboration_analytics_show( + cmd, collaboration_id, api_version=None): """Show analytics information for a collaboration :param cmd: CLI command context :param collaboration_id: Collaboration identifier + :param api_version: API version to use for this request :return: Analytics information """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_get(collaboration_id) -def frontend_collaboration_analytics_cleanroompolicy(cmd, collaboration_id): +def frontend_collaboration_analytics_cleanroompolicy( + cmd, collaboration_id, api_version=None): """Get cleanroom policy for analytics workload :param cmd: CLI command context :param collaboration_id: 
Collaboration identifier + :param api_version: API version to use for this request :return: Cleanroom policy """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_cleanroompolicy_get( collaboration_id) @@ -84,38 +97,45 @@ def frontend_collaboration_analytics_cleanroompolicy(cmd, collaboration_id): # OIDC Commands # ============================================================================ -def frontend_collaboration_oidc_issuerinfo_show(cmd, collaboration_id): +def frontend_collaboration_oidc_issuerinfo_show( + cmd, collaboration_id, api_version=None): """Show OIDC issuer information :param cmd: CLI command context :param collaboration_id: Collaboration identifier + :param api_version: API version to use for this request :return: OIDC issuer information """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.oidc_issuer_info_get(collaboration_id) -def frontend_collaboration_oidc_set_issuer_url(cmd, collaboration_id, url): +def frontend_collaboration_oidc_set_issuer_url( + cmd, collaboration_id, url, api_version=None): """Set collaboration OIDC issuer URL :param cmd: CLI command context :param collaboration_id: Collaboration identifier :param url: OIDC issuer URL + :param api_version: API version to use for this request :return: Operation result """ body = {"url": url} - client = get_frontend_client(cmd) - return client.collaboration.oidc_set_issuer_url_post(collaboration_id, body=body) + client = get_frontend_client(cmd, api_version=api_version) + return client.collaboration.oidc_set_issuer_url_post( + collaboration_id, body=body) -def frontend_collaboration_oidc_keys_show(cmd, collaboration_id): +def frontend_collaboration_oidc_keys_show( + cmd, collaboration_id, api_version=None): """Get collaboration OIDC signing keys (JWKS format) :param cmd: CLI command context :param collaboration_id: Collaboration 
identifier + :param api_version: API version to use for this request :return: OIDC keys in JWKS format """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.oidc_keys_get(collaboration_id) @@ -123,42 +143,50 @@ def frontend_collaboration_oidc_keys_show(cmd, collaboration_id): # Invitation Commands # ============================================================================ -def frontend_collaboration_invitation_list(cmd, collaboration_id, pending_only=False): +def frontend_collaboration_invitation_list( + cmd, + collaboration_id, + pending_only=False, + api_version=None): """List invitations for a collaboration :param cmd: CLI command context :param collaboration_id: Collaboration identifier :param pending_only: When true, returns only pending invitations (default: False) + :param api_version: API version to use for this request :return: Invitations object with array of invitation details """ - client = get_frontend_client(cmd) - return client.collaboration.invitations_get(collaboration_id, pending_only=pending_only) + client = get_frontend_client(cmd, api_version=api_version) + return client.collaboration.invitations_get( + collaboration_id, pending_only=pending_only) def frontend_collaboration_invitation_show( - cmd, collaboration_id, invitation_id): + cmd, collaboration_id, invitation_id, api_version=None): """Show invitation details :param cmd: CLI command context :param collaboration_id: Collaboration identifier :param invitation_id: Invitation identifier + :param api_version: API version to use for this request :return: Invitation details """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.invitation_id_get( collaboration_id, invitation_id) def frontend_collaboration_invitation_accept( - cmd, collaboration_id, invitation_id): + cmd, collaboration_id, invitation_id, api_version=None): """Accept an invitation 
:param cmd: CLI command context :param collaboration_id: Collaboration identifier :param invitation_id: Invitation identifier + :param api_version: API version to use for this request :return: Acceptance result """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.invitation_id_accept_post( collaboration_id, invitation_id) @@ -167,38 +195,46 @@ def frontend_collaboration_invitation_accept( # Dataset Commands # ============================================================================ -def frontend_collaboration_dataset_list(cmd, collaboration_id): +def frontend_collaboration_dataset_list( + cmd, collaboration_id, api_version=None): """List datasets for a collaboration :param cmd: CLI command context :param collaboration_id: Collaboration identifier + :param api_version: API version to use for this request :return: List of datasets """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_datasets_list_get(collaboration_id) -def frontend_collaboration_dataset_show(cmd, collaboration_id, document_id): +def frontend_collaboration_dataset_show( + cmd, + collaboration_id, + document_id, + api_version=None): """Show dataset details :param cmd: CLI command context :param collaboration_id: Collaboration identifier :param document_id: Dataset document identifier + :param api_version: API version to use for this request :return: Dataset details """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_dataset_document_id_get( collaboration_id, document_id) def frontend_collaboration_dataset_publish( - cmd, collaboration_id, document_id, body): + cmd, collaboration_id, document_id, body, api_version=None): """Publish a dataset :param cmd: CLI command context :param collaboration_id: Collaboration identifier :param document_id: Dataset 
document identifier :param body: Publish configuration JSON (string, dict, or @file) + :param api_version: API version to use for this request :return: Publish result """ import json @@ -207,20 +243,22 @@ def frontend_collaboration_dataset_publish( if isinstance(body, str): body = json.loads(body) - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_dataset_document_id_publish_post( collaboration_id, document_id, body) -def frontend_collaboration_dataset_queries_list(cmd, collaboration_id, document_id): +def frontend_collaboration_dataset_queries_list( + cmd, collaboration_id, document_id, api_version=None): """List queries that use a specific dataset :param cmd: CLI command context :param collaboration_id: Collaboration identifier :param document_id: Dataset document identifier + :param api_version: API version to use for this request :return: List of query IDs using this dataset """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_datasets_document_id_queries_get( collaboration_id, document_id) @@ -229,21 +267,26 @@ def frontend_collaboration_dataset_queries_list(cmd, collaboration_id, document_ # Consent Commands # ============================================================================ -def frontend_collaboration_consent_check(cmd, collaboration_id, document_id): +def frontend_collaboration_consent_check( + cmd, + collaboration_id, + document_id, + api_version=None): """Check consent document status :param cmd: CLI command context :param collaboration_id: Collaboration identifier :param document_id: Consent document identifier + :param api_version: API version to use for this request :return: Consent status """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.check_consent_document_id_get( collaboration_id, 
document_id) def frontend_collaboration_consent_set( - cmd, collaboration_id, document_id, consent_action): + cmd, collaboration_id, document_id, consent_action, api_version=None): """Set consent document action NOTE: API changed - consent action is now 'enable' or 'disable' (not accept/reject) @@ -252,10 +295,11 @@ def frontend_collaboration_consent_set( :param collaboration_id: Collaboration identifier :param document_id: Consent document identifier :param consent_action: Consent action ('enable' or 'disable') + :param api_version: API version to use for this request :return: Action result """ body = {"consentAction": consent_action} - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.set_consent_document_id_put( collaboration_id, document_id, body=body ) @@ -265,38 +309,45 @@ def frontend_collaboration_consent_set( # Query Commands # ============================================================================ -def frontend_collaboration_query_list(cmd, collaboration_id): +def frontend_collaboration_query_list(cmd, collaboration_id, api_version=None): """List queries for a collaboration :param cmd: CLI command context :param collaboration_id: Collaboration identifier + :param api_version: API version to use for this request :return: List of queries """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_queries_list_get(collaboration_id) -def frontend_collaboration_query_show(cmd, collaboration_id, document_id): +def frontend_collaboration_query_show( + cmd, + collaboration_id, + document_id, + api_version=None): """Show query details :param cmd: CLI command context :param collaboration_id: Collaboration identifier :param document_id: Query document identifier + :param api_version: API version to use for this request :return: Query details """ - client = get_frontend_client(cmd) + client = 
get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_queries_document_id_get( collaboration_id, document_id) def frontend_collaboration_query_publish( - cmd, collaboration_id, document_id, body): + cmd, collaboration_id, document_id, body, api_version=None): """Publish a query :param cmd: CLI command context :param collaboration_id: Collaboration identifier :param document_id: Query document identifier :param body: Publish configuration JSON (string, dict, or @file) + :param api_version: API version to use for this request :return: Publish result """ import json @@ -305,7 +356,7 @@ def frontend_collaboration_query_publish( if isinstance(body, str): body = json.loads(body) - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_queries_document_id_publish_post( collaboration_id, document_id, body) @@ -314,7 +365,8 @@ def frontend_collaboration_query_run( cmd, collaboration_id, document_id, - body=None): + body=None, + api_version=None): """Run a query :param cmd: CLI command context @@ -322,6 +374,7 @@ def frontend_collaboration_query_run( :param document_id: Query document identifier :param body: Run configuration JSON (string, dict, or @file). 
Optional fields: runId (auto-generated if not provided), dryRun, startDate, endDate, useOptimizer + :param api_version: API version to use for this request :return: Run result """ import json @@ -339,12 +392,18 @@ def frontend_collaboration_query_run( if 'runId' not in body: body['runId'] = str(uuid.uuid4()) - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_queries_document_id_run_post( collaboration_id, document_id, body=body) -def frontend_collaboration_query_vote(cmd, collaboration_id, document_id, vote_action, proposal_id=None): +def frontend_collaboration_query_vote( + cmd, + collaboration_id, + document_id, + vote_action, + proposal_id=None, + api_version=None): """Vote on a query (unified accept/reject endpoint) :param cmd: CLI command context @@ -352,6 +411,7 @@ def frontend_collaboration_query_vote(cmd, collaboration_id, document_id, vote_a :param document_id: Query document identifier :param vote_action: Vote action ('accept' or 'reject') :param proposal_id: Optional proposal ID + :param api_version: API version to use for this request :return: Vote result (None on success - 204 No Content) """ body = { @@ -361,7 +421,7 @@ def frontend_collaboration_query_vote(cmd, collaboration_id, document_id, vote_a if proposal_id: body["proposalId"] = proposal_id - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_queries_document_id_vote_post( collaboration_id, document_id, body=body) @@ -371,30 +431,32 @@ def frontend_collaboration_query_vote(cmd, collaboration_id, document_id, vote_a # ============================================================================ def frontend_collaboration_query_runhistory_list( - cmd, collaboration_id, document_id): + cmd, collaboration_id, document_id, api_version=None): """List query run history :param cmd: CLI command context :param collaboration_id: Collaboration 
identifier :param document_id: Query document identifier + :param api_version: API version to use for this request :return: List of query runs """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_queries_document_id_runhistory_get( collaboration_id, document_id ) def frontend_collaboration_query_runresult_show( - cmd, collaboration_id, job_id): + cmd, collaboration_id, job_id, api_version=None): """Show query job result details :param cmd: CLI command context :param collaboration_id: Collaboration identifier :param job_id: Query job identifier + :param api_version: API version to use for this request :return: Query job result details """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_queries_jobid_get( collaboration_id, job_id) @@ -403,7 +465,13 @@ def frontend_collaboration_query_runresult_show( # Audit Commands # ============================================================================ -def frontend_collaboration_audit_list(cmd, collaboration_id, scope=None, from_seqno=None, to_seqno=None): +def frontend_collaboration_audit_list( + cmd, + collaboration_id, + scope=None, + from_seqno=None, + to_seqno=None, + api_version=None): """List audit events for a collaboration :param cmd: CLI command context @@ -411,24 +479,31 @@ def frontend_collaboration_audit_list(cmd, collaboration_id, scope=None, from_se :param scope: Optional scope filter :param from_seqno: Optional starting sequence number :param to_seqno: Optional ending sequence number + :param api_version: API version to use for this request :return: Paginated audit events with nextLink and value array """ - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_auditevents_get( collaboration_id, scope=scope, from_seqno=from_seqno, to_seqno=to_seqno) -def 
frontend_collaboration_analytics_secret_set(cmd, collaboration_id, secret_name, secret_value): +def frontend_collaboration_analytics_secret_set( + cmd, + collaboration_id, + secret_name, + secret_value, + api_version=None): """Set secret for analytics workload :param cmd: CLI command context :param collaboration_id: Collaboration identifier :param secret_name: Secret name :param secret_value: Secret value + :param api_version: API version to use for this request :return: Operation result """ body = {"secretValue": secret_value} - client = get_frontend_client(cmd) + client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_secrets_secret_name_put( collaboration_id, secret_name, body=body) diff --git a/src/managedcleanroom/azext_managedcleanroom/_help.py b/src/managedcleanroom/azext_managedcleanroom/_help.py index 24646974c2d..a80dcbf9512 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_help.py +++ b/src/managedcleanroom/azext_managedcleanroom/_help.py @@ -24,6 +24,12 @@ 2. Azure CLI authentication (az login) You must configure the Analytics Frontend API endpoint URL before using these commands. + + All frontend commands support the --api-version parameter to specify the API version. 
+ Default: 2026-03-01-preview + + Supported versions: + - 2026-03-01-preview """ helps['managedcleanroom frontend login'] = """ diff --git a/src/managedcleanroom/azext_managedcleanroom/_params.py b/src/managedcleanroom/azext_managedcleanroom/_params.py index ba911114549..ce5a76bbe38 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_params.py +++ b/src/managedcleanroom/azext_managedcleanroom/_params.py @@ -12,6 +12,17 @@ def load_arguments(self, _): # pylint: disable=unused-argument from knack.arguments import CLIArgumentType + # Global API versioning parameter for all frontend commands + with self.argument_context('managedcleanroom frontend') as c: + from azure.cli.core.commands.parameters import get_enum_type + c.argument( + 'api_version', + options_list=['--api-version'], + help='API version to use for this request. Default: 2026-03-01-preview', + arg_type=get_enum_type( + ['2026-03-01-preview']), + default='2026-03-01-preview') + # Define argument types for frontend commands collaboration_id_type = CLIArgumentType( options_list=['--collaboration-id', '-c'], diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/__init__.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/__init__.py index e3229dff489..1ac89280fb8 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/__init__.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/__init__.py @@ -14,6 +14,7 @@ try: from ._patch import __all__ as _patch_all + from ._patch import * # noqa: F403 except ImportError: _patch_all = [] from ._patch import patch_sdk as _patch_sdk diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_client.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_client.py index 68b4266b83d..1a84e3e8aae 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_client.py +++ 
b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_client.py @@ -17,7 +17,7 @@ from .operations import CollaborationOperations -class AnalyticsFrontendAPI: # pylint: disable=client-accepts-api-version-keyword +class AnalyticsFrontendAPI: """Analytics Frontend API. Use following command to generate the typescript client - npx openapi-typescript ./openapi.yaml --output ./typescript.ts. @@ -25,6 +25,9 @@ class AnalyticsFrontendAPI: # pylint: disable=client-accepts-api-version-keywor :vartype collaboration: analytics_frontend_api.operations.CollaborationOperations :keyword endpoint: Service URL. Required. Default value is "". :paramtype endpoint: str + :keyword api_version: Api Version. Default value is "2026-03-01-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str """ def __init__( # pylint: disable=missing-client-constructor-parameter-credential diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_configuration.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_configuration.py index 71a9173c647..15b0d18a3e3 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_configuration.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_configuration.py @@ -16,10 +16,16 @@ class AnalyticsFrontendAPIConfiguration: # pylint: disable=too-many-instance-at Note that all parameters used to create this instance are saved as instance attributes. + + :keyword api_version: Api Version. Default value is "2026-03-01-preview". Note that overriding + this default value may result in unsupported behavior. 
+ :paramtype api_version: str """ def __init__(self, **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2026-03-01-preview") + self.api_version = api_version kwargs.setdefault( "sdk_moniker", "analyticsfrontendapi/{}".format(VERSION)) diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/__init__.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/__init__.py index 006671542af..bb9094db921 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/__init__.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/__init__.py @@ -1,4 +1,3 @@ -# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/serialization.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/serialization.py index aae648be171..6d77cffbf80 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/serialization.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/serialization.py @@ -1,4 +1,5 @@ -# pylint: disable=line-too-long,useless-suppression,too-many-lines,protected-access,broad-exception-caught +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# pylint: disable=protected-access,broad-exception-caught,multiple-statements # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_client.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_client.py index 91762a99f99..eeb74e28e1c 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_client.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_client.py @@ -17,7 +17,7 @@ from .operations import CollaborationOperations -class AnalyticsFrontendAPI: # pylint: disable=client-accepts-api-version-keyword +class AnalyticsFrontendAPI: """Analytics Frontend API. Use following command to generate the typescript client - npx openapi-typescript ./openapi.yaml --output ./typescript.ts. @@ -25,6 +25,9 @@ class AnalyticsFrontendAPI: # pylint: disable=client-accepts-api-version-keywor :vartype collaboration: analytics_frontend_api.aio.operations.CollaborationOperations :keyword endpoint: Service URL. Required. Default value is "". :paramtype endpoint: str + :keyword api_version: Api Version. Default value is "2026-03-01-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str """ def __init__( # pylint: disable=missing-client-constructor-parameter-credential diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_configuration.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_configuration.py index f5705385f6c..6ce8e57c748 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_configuration.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_configuration.py @@ -16,10 +16,16 @@ class AnalyticsFrontendAPIConfiguration: # pylint: disable=too-many-instance-at Note that all parameters used to create this instance are saved as instance attributes. + + :keyword api_version: Api Version. Default value is "2026-03-01-preview". 
Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str """ def __init__(self, **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2026-03-01-preview") + self.api_version = api_version kwargs.setdefault( "sdk_moniker", "analyticsfrontendapi/{}".format(VERSION)) diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_operations.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_operations.py index b61bbc3e5d9..b7ff58f9fba 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_operations.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_operations.py @@ -125,6 +125,7 @@ async def list( _request = build_collaboration_list_request( active_only=active_only, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -213,6 +214,7 @@ async def id_get( _request = build_collaboration_id_get_request( collaboration_id=collaboration_id, active_only=active_only, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -322,6 +324,7 @@ async def report_get(self, collaboration_id: str, **kwargs: Any) -> JSON: _request = build_collaboration_report_get_request( collaboration_id=collaboration_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -405,6 +408,7 @@ async def analytics_get( _request = build_collaboration_analytics_get_request( collaboration_id=collaboration_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -490,6 +494,7 @@ async def analytics_cleanroompolicy_get( _request = build_collaboration_analytics_cleanroompolicy_get_request( collaboration_id=collaboration_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -575,6 +580,7 @@ async def oidc_issuer_info_get( _request = 
build_collaboration_oidc_issuer_info_get_request( collaboration_id=collaboration_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -768,6 +774,7 @@ async def oidc_set_issuer_url_post(self, _request = build_collaboration_oidc_set_issuer_url_post_request( collaboration_id=collaboration_id, content_type=content_type, + api_version=self._config.api_version, json=_json, content=_content, headers=_headers, @@ -864,6 +871,7 @@ async def oidc_keys_get( _request = build_collaboration_oidc_keys_get_request( collaboration_id=collaboration_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -952,6 +960,7 @@ async def invitations_get( _request = build_collaboration_invitations_get_request( collaboration_id=collaboration_id, pending_only=pending_only, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -1044,6 +1053,7 @@ async def invitation_id_get( _request = build_collaboration_invitation_id_get_request( collaboration_id=collaboration_id, invitation_id=invitation_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -1122,6 +1132,7 @@ async def invitation_id_accept_post( _request = build_collaboration_invitation_id_accept_post_request( collaboration_id=collaboration_id, invitation_id=invitation_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -1204,6 +1215,7 @@ async def analytics_datasets_list_get( _request = build_collaboration_analytics_datasets_list_get_request( collaboration_id=collaboration_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -1332,6 +1344,7 @@ async def analytics_dataset_document_id_get( _request = build_collaboration_analytics_dataset_document_id_get_request( collaboration_id=collaboration_id, document_id=document_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -1589,6 +1602,7 @@ async def analytics_dataset_document_id_publish_post( # 
pylint: disable=name-to collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, + api_version=self._config.api_version, json=_json, content=_content, headers=_headers, @@ -1676,6 +1690,7 @@ async def check_consent_document_id_get( _request = build_collaboration_check_consent_document_id_get_request( collaboration_id=collaboration_id, document_id=document_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -1861,6 +1876,7 @@ async def set_consent_document_id_put(self, collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, + api_version=self._config.api_version, json=_json, content=_content, headers=_headers, @@ -2058,6 +2074,7 @@ async def analytics_queries_document_id_publish_post( # pylint: disable=name-to collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, + api_version=self._config.api_version, json=_json, content=_content, headers=_headers, @@ -2144,6 +2161,7 @@ async def analytics_queries_list_get( _request = build_collaboration_analytics_queries_list_get_request( collaboration_id=collaboration_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -2250,6 +2268,7 @@ async def analytics_queries_document_id_get( _request = build_collaboration_analytics_queries_document_id_get_request( collaboration_id=collaboration_id, document_id=document_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -2437,6 +2456,7 @@ async def analytics_queries_document_id_vote_post( collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, + api_version=self._config.api_version, json=_json, content=_content, headers=_headers, @@ -2706,6 +2726,7 @@ async def analytics_queries_document_id_run_post( collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, + api_version=self._config.api_version, json=_json, content=_content, headers=_headers, @@ 
-2809,6 +2830,7 @@ async def analytics_queries_jobid_get( _request = build_collaboration_analytics_queries_jobid_get_request( collaboration_id=collaboration_id, jobid=jobid, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -2934,6 +2956,7 @@ async def analytics_queries_document_id_runhistory_get( # pylint: disable=name- _request = build_collaboration_analytics_queries_document_id_runhistory_get_request( collaboration_id=collaboration_id, document_id=document_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -3012,6 +3035,7 @@ async def analytics_datasets_document_id_queries_get( # pylint: disable=name-to _request = build_collaboration_analytics_datasets_document_id_queries_get_request( collaboration_id=collaboration_id, document_id=document_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -3208,6 +3232,7 @@ async def analytics_secrets_secret_name_put(self, collaboration_id=collaboration_id, secret_name=secret_name, content_type=content_type, + api_version=self._config.api_version, json=_json, content=_content, headers=_headers, @@ -3316,6 +3341,7 @@ async def analytics_auditevents_get( scope=scope, from_seqno=from_seqno, to_seqno=to_seqno, + api_version=self._config.api_version, headers=_headers, params=_params, ) diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_operations.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_operations.py index b1aa1e61d0a..b8ea95d3671 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_operations.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_operations.py @@ -42,12 +42,17 @@ def build_collaboration_list_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + 
"api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = "/collaborations" # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") if active_only is not None: _params["activeOnly"] = _SERIALIZER.query( "active_only", active_only, "bool") @@ -69,6 +74,9 @@ def build_collaboration_id_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -80,6 +88,8 @@ def build_collaboration_id_get_request( _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") if active_only is not None: _params["activeOnly"] = _SERIALIZER.query( "active_only", active_only, "bool") @@ -99,7 +109,11 @@ def build_collaboration_report_get_request( collaboration_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -110,17 +124,30 @@ def build_collaboration_report_get_request( _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs) def 
build_collaboration_analytics_get_request( # pylint: disable=name-too-long collaboration_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -131,17 +158,30 @@ def build_collaboration_analytics_get_request( # pylint: disable=name-too-long _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_analytics_cleanroompolicy_get_request( # pylint: disable=name-too-long collaboration_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -152,17 +192,30 @@ def build_collaboration_analytics_cleanroompolicy_get_request( # pylint: disabl _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs) def 
build_collaboration_oidc_issuer_info_get_request( # pylint: disable=name-too-long collaboration_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -173,20 +226,33 @@ def build_collaboration_oidc_issuer_info_get_request( # pylint: disable=name-to _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_oidc_set_issuer_url_post_request( # pylint: disable=name-too-long collaboration_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop( "content_type", _headers.pop( "Content-Type", None)) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -197,20 +263,33 @@ def build_collaboration_oidc_set_issuer_url_post_request( # pylint: disable=nam _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header( "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return 
HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_oidc_keys_get_request( # pylint: disable=name-too-long collaboration_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -221,10 +300,19 @@ def build_collaboration_oidc_keys_get_request( # pylint: disable=name-too-long _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_invitations_get_request( # pylint: disable=name-too-long @@ -233,6 +321,9 @@ def build_collaboration_invitations_get_request( # pylint: disable=name-too-lon _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -244,6 +335,8 @@ def build_collaboration_invitations_get_request( # pylint: disable=name-too-lon _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") if pending_only is not None: _params["pendingOnly"] = _SERIALIZER.query( "pending_only", pending_only, "bool") 
@@ -263,7 +356,11 @@ def build_collaboration_invitation_id_get_request( # pylint: disable=name-too-l collaboration_id: str, invitation_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -281,17 +378,30 @@ def build_collaboration_invitation_id_get_request( # pylint: disable=name-too-l _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_invitation_id_accept_post_request( # pylint: disable=name-too-long collaboration_id: str, invitation_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -309,17 +419,30 @@ def build_collaboration_invitation_id_accept_post_request( # pylint: disable=na _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="POST", + url=_url, + params=_params, + 
headers=_headers, + **kwargs) def build_collaboration_analytics_datasets_list_get_request( # pylint: disable=name-too-long collaboration_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -330,17 +453,30 @@ def build_collaboration_analytics_datasets_list_get_request( # pylint: disable= _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_analytics_dataset_document_id_get_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -352,20 +488,33 @@ def build_collaboration_analytics_dataset_document_id_get_request( # pylint: di _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="GET", + url=_url, + params=_params, + 
headers=_headers, + **kwargs) def build_collaboration_analytics_dataset_document_id_publish_post_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop( "content_type", _headers.pop( "Content-Type", None)) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -377,20 +526,33 @@ def build_collaboration_analytics_dataset_document_id_publish_post_request( # p _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header( "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_check_consent_document_id_get_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -402,20 +564,33 @@ def build_collaboration_check_consent_document_id_get_request( # pylint: disabl _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, 
"str") + # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_set_consent_document_id_put_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop( "content_type", _headers.pop( "Content-Type", None)) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -427,23 +602,36 @@ def build_collaboration_set_consent_document_id_put_request( # pylint: disable= _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header( "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PUT", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_analytics_queries_document_id_publish_post_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop( "content_type", _headers.pop( "Content-Type", None)) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = 
_headers.pop("Accept", "application/json") # Construct URL @@ -455,20 +643,33 @@ def build_collaboration_analytics_queries_document_id_publish_post_request( # p _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header( "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_analytics_queries_list_get_request( # pylint: disable=name-too-long collaboration_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -479,17 +680,30 @@ def build_collaboration_analytics_queries_list_get_request( # pylint: disable=n _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_analytics_queries_document_id_get_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + 
api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -501,20 +715,33 @@ def build_collaboration_analytics_queries_document_id_get_request( # pylint: di _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_analytics_queries_document_id_vote_post_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop( "content_type", _headers.pop( "Content-Type", None)) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -526,23 +753,36 @@ def build_collaboration_analytics_queries_document_id_vote_post_request( # pyli _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header( "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_analytics_queries_document_id_run_post_request( 
# pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop( "content_type", _headers.pop( "Content-Type", None)) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -554,20 +794,33 @@ def build_collaboration_analytics_queries_document_id_run_post_request( # pylin _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header( "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_analytics_queries_jobid_get_request( # pylint: disable=name-too-long collaboration_id: str, jobid: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -579,17 +832,30 @@ def build_collaboration_analytics_queries_jobid_get_request( # pylint: disable= _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return 
HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_analytics_queries_document_id_runhistory_get_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -601,17 +867,30 @@ def build_collaboration_analytics_queries_document_id_runhistory_get_request( # _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_analytics_datasets_document_id_queries_get_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -623,20 +902,33 @@ def build_collaboration_analytics_datasets_document_id_queries_get_request( # p _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers _headers["Accept"] = 
_SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_analytics_secrets_secret_name_put_request( # pylint: disable=name-too-long collaboration_id: str, secret_name: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop( "content_type", _headers.pop( "Content-Type", None)) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -648,13 +940,22 @@ def build_collaboration_analytics_secrets_secret_name_put_request( # pylint: di _url: str = _url.format(**path_format_arguments) # type: ignore + # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") + # Construct headers if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header( "content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PUT", url=_url, headers=_headers, **kwargs) + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + **kwargs) def build_collaboration_analytics_auditevents_get_request( # pylint: disable=name-too-long @@ -668,6 +969,9 @@ def build_collaboration_analytics_auditevents_get_request( # pylint: disable=na _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + api_version: str = kwargs.pop( + "api_version", _params.pop( + "api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -679,6 +983,8 @@ def 
build_collaboration_analytics_auditevents_get_request( # pylint: disable=na _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters + _params["api-version"] = _SERIALIZER.query( + "api_version", api_version, "str") if scope is not None: _params["scope"] = _SERIALIZER.query("scope", scope, "str") if from_seqno is not None: @@ -759,6 +1065,7 @@ def list(self, *, active_only: bool = False, **kwargs: Any) -> List[JSON]: _request = build_collaboration_list_request( active_only=active_only, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -847,6 +1154,7 @@ def id_get( _request = build_collaboration_id_get_request( collaboration_id=collaboration_id, active_only=active_only, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -956,6 +1264,7 @@ def report_get(self, collaboration_id: str, **kwargs: Any) -> JSON: _request = build_collaboration_report_get_request( collaboration_id=collaboration_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -1036,6 +1345,7 @@ def analytics_get(self, collaboration_id: str, **kwargs: Any) -> JSON: _request = build_collaboration_analytics_get_request( collaboration_id=collaboration_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -1123,6 +1433,7 @@ def analytics_cleanroompolicy_get( _request = build_collaboration_analytics_cleanroompolicy_get_request( collaboration_id=collaboration_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -1208,6 +1519,7 @@ def oidc_issuer_info_get( _request = build_collaboration_oidc_issuer_info_get_request( collaboration_id=collaboration_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -1401,6 +1713,7 @@ def oidc_set_issuer_url_post(self, _request = build_collaboration_oidc_set_issuer_url_post_request( collaboration_id=collaboration_id, content_type=content_type, + 
api_version=self._config.api_version, json=_json, content=_content, headers=_headers, @@ -1494,6 +1807,7 @@ def oidc_keys_get(self, collaboration_id: str, **kwargs: Any) -> JSON: _request = build_collaboration_oidc_keys_get_request( collaboration_id=collaboration_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -1582,6 +1896,7 @@ def invitations_get( _request = build_collaboration_invitations_get_request( collaboration_id=collaboration_id, pending_only=pending_only, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -1674,6 +1989,7 @@ def invitation_id_get( _request = build_collaboration_invitation_id_get_request( collaboration_id=collaboration_id, invitation_id=invitation_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -1754,6 +2070,7 @@ def invitation_id_accept_post( _request = build_collaboration_invitation_id_accept_post_request( collaboration_id=collaboration_id, invitation_id=invitation_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -1838,6 +2155,7 @@ def analytics_datasets_list_get( _request = build_collaboration_analytics_datasets_list_get_request( collaboration_id=collaboration_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -1966,6 +2284,7 @@ def analytics_dataset_document_id_get( _request = build_collaboration_analytics_dataset_document_id_get_request( collaboration_id=collaboration_id, document_id=document_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -2223,6 +2542,7 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, + api_version=self._config.api_version, json=_json, content=_content, headers=_headers, @@ -2310,6 +2630,7 @@ def check_consent_document_id_get( _request = build_collaboration_check_consent_document_id_get_request( 
collaboration_id=collaboration_id, document_id=document_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -2495,6 +2816,7 @@ def set_consent_document_id_put(self, collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, + api_version=self._config.api_version, json=_json, content=_content, headers=_headers, @@ -2692,6 +3014,7 @@ def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, + api_version=self._config.api_version, json=_json, content=_content, headers=_headers, @@ -2778,6 +3101,7 @@ def analytics_queries_list_get( _request = build_collaboration_analytics_queries_list_get_request( collaboration_id=collaboration_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -2884,6 +3208,7 @@ def analytics_queries_document_id_get( _request = build_collaboration_analytics_queries_document_id_get_request( collaboration_id=collaboration_id, document_id=document_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -3074,6 +3399,7 @@ def analytics_queries_document_id_vote_post(self, collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, + api_version=self._config.api_version, json=_json, content=_content, headers=_headers, @@ -3343,6 +3669,7 @@ def analytics_queries_document_id_run_post( collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, + api_version=self._config.api_version, json=_json, content=_content, headers=_headers, @@ -3446,6 +3773,7 @@ def analytics_queries_jobid_get( _request = build_collaboration_analytics_queries_jobid_get_request( collaboration_id=collaboration_id, jobid=jobid, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -3571,6 +3899,7 @@ def analytics_queries_document_id_runhistory_get( # pylint: disable=name-too-lo _request = 
build_collaboration_analytics_queries_document_id_runhistory_get_request( collaboration_id=collaboration_id, document_id=document_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -3649,6 +3978,7 @@ def analytics_datasets_document_id_queries_get( # pylint: disable=name-too-long _request = build_collaboration_analytics_datasets_document_id_queries_get_request( collaboration_id=collaboration_id, document_id=document_id, + api_version=self._config.api_version, headers=_headers, params=_params, ) @@ -3845,6 +4175,7 @@ def analytics_secrets_secret_name_put(self, collaboration_id=collaboration_id, secret_name=secret_name, content_type=content_type, + api_version=self._config.api_version, json=_json, content=_content, headers=_headers, @@ -3953,6 +4284,7 @@ def analytics_auditevents_get( scope=scope, from_seqno=from_seqno, to_seqno=to_seqno, + api_version=self._config.api_version, headers=_headers, params=_params, ) diff --git a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_collaboration.py b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_collaboration.py index 5bd0613897d..4aeb88327a7 100644 --- a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_collaboration.py +++ b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_collaboration.py @@ -288,7 +288,8 @@ def test_dataset_queries_list(self, mock_get_client): # Filter Tests @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') - def test_list_collaborations_with_active_only_filter(self, mock_get_client): + def test_list_collaborations_with_active_only_filter( + self, mock_get_client): """Test listing collaborations with active_only filter""" # Mock the client mock_client = Mock() @@ -306,7 +307,8 @@ def test_list_collaborations_with_active_only_filter(self, mock_get_client): # Verify self.assertEqual(len(result), 1) self.assertEqual(result[0]["status"], "active") - 
mock_client.collaboration.list.assert_called_once_with(active_only=True) + mock_client.collaboration.list.assert_called_once_with( + active_only=True) @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') def test_show_collaboration_with_active_only_filter(self, mock_get_client): @@ -329,6 +331,5 @@ def test_show_collaboration_with_active_only_filter(self, mock_get_client): "test-collab-123", active_only=True) - if __name__ == '__main__': unittest.main() diff --git a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_misc.py b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_misc.py index 81647f9d4e3..d70a49548a8 100644 --- a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_misc.py +++ b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_misc.py @@ -285,11 +285,7 @@ def test_list_audit_with_filters(self, mock_get_client): self.assertEqual(len(result), 1) self.assertEqual(result[0]["action"], "query_executed") mock_client.collaboration.analytics_auditevents_get.assert_called_once_with( - "test-collab-123", - scope="analytics", - from_seqno=100, - to_seqno=200) - + "test-collab-123", scope="analytics", from_seqno=100, to_seqno=200) if __name__ == '__main__': diff --git a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py index f55829faf48..d5718a92b71 100644 --- a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py +++ b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py @@ -155,7 +155,8 @@ def test_vote_accept_query(self, mock_get_client): """Test accepting a query vote""" # Mock the client and its method chain mock_client = Mock() - mock_client.collaboration.analytics_queries_document_id_vote_post.return_value = None # 204 No Content + # 204 No Content + 
mock_client.collaboration.analytics_queries_document_id_vote_post.return_value = None mock_get_client.return_value = mock_client # Execute @@ -176,7 +177,8 @@ def test_vote_reject_query(self, mock_get_client): """Test rejecting a query vote""" # Mock the client and its method chain mock_client = Mock() - mock_client.collaboration.analytics_queries_document_id_vote_post.return_value = None # 204 No Content + # 204 No Content + mock_client.collaboration.analytics_queries_document_id_vote_post.return_value = None mock_get_client.return_value = mock_client # Execute From 22d5e5f77f4c72deae819a157d693b26201bdb93 Mon Sep 17 00:00:00 2001 From: Saksham Garg Date: Wed, 18 Mar 2026 12:42:13 +0530 Subject: [PATCH 03/10] Fix version: change from 1.0.0b4 to 1.0.0b3 - Updated VERSION in setup.py to 1.0.0b3 - Merged 1.0.0b4 changelog into 1.0.0b3 in HISTORY.rst - All features remain the same (API versioning support) - Added helper files to .gitignore Validation passed: - 37/37 pytest tests passed - azdev style: PASSED - azdev linter: PASSED - test_index.py: 9 tests passed (2 skipped) - aaz folder: unchanged --- .gitignore | 7 +++++++ src/managedcleanroom/HISTORY.rst | 6 +----- src/managedcleanroom/setup.py | 2 +- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/.gitignore b/.gitignore index 3e9d06ca5e3..748a8c2d506 100644 --- a/.gitignore +++ b/.gitignore @@ -127,3 +127,10 @@ _az_debug/ # Ignore test results test_results.xml +.opencode/ +docs/agents.md +scripts/autofix_cleanroom.py +scripts/autofix_managedcleanroom.sh +scripts/validate_managedcleanroom.sh +src/managedcleanroom/cmdlet_testing_status_detailed.csv +src/managedcleanroom/cmdlet_testing_status.xlsx diff --git a/src/managedcleanroom/HISTORY.rst b/src/managedcleanroom/HISTORY.rst index 74c24f72780..10602f91b60 100644 --- a/src/managedcleanroom/HISTORY.rst +++ b/src/managedcleanroom/HISTORY.rst @@ -3,7 +3,7 @@ Release History =============== -1.0.0b4 +1.0.0b3 +++++++ * Regenerated analytics_frontend_api SDK 
from V2026_03_01_Preview OpenAPI specification * BREAKING CHANGE: All frontend API endpoints now require api-version=2026-03-01-preview query parameter @@ -28,10 +28,6 @@ Release History * Updated: Added --pending-only filter to invitation list command * Updated: Added --scope, --from-seqno, --to-seqno filters to audit event list command * Updated: Response structures modernized (many list endpoints now return structured objects with value arrays) - -1.0.0b3 -+++++++ -* Regenerate analytics_frontend_api SDK from updated frontend.yaml OpenAPI spec * Remove empty enum value from ApplicationState schema 1.0.0b1 diff --git a/src/managedcleanroom/setup.py b/src/managedcleanroom/setup.py index 1c7d7a36b43..b2fce87ebba 100644 --- a/src/managedcleanroom/setup.py +++ b/src/managedcleanroom/setup.py @@ -10,7 +10,7 @@ # HISTORY.rst entry. -VERSION = '1.0.0b4' +VERSION = '1.0.0b3' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers From d48ae3cf3a988bf8f78cd6b15d23d4717e135a4d Mon Sep 17 00:00:00 2001 From: Saksham Garg Date: Thu, 19 Mar 2026 14:44:35 +0530 Subject: [PATCH 04/10] Update to latest Frontend API spec from develop branch MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Pulled latest spec from azure-cleanroom develop branch - Fixed ApplicationState empty enum value (AutoRest blocker) - Regenerated analytics_frontend_api SDK with updated method signatures - Applied Microsoft MIT License headers to all SDK files - Updated custom code for SDK method renames (5 functions) - Updated test mocks for new SDK methods (14 changes across 3 files) - Added comprehensive pylint suppressions for generated code SDK Method Changes (internal, transparent to CLI): - list → list_get - analytics_dataset_* → analytics_datasets_* (plural) - check_consent_document_id_get → consent_document_id_get - set_consent_document_id_put → consent_document_id_put Validation (all passed): - pytest: 37/37 
tests passed - azdev style: PASSED - azdev linter: PASSED - test_index: PASSED - aaz folder: unchanged - wheel: built successfully (157K) Version: 1.0.0b3 (kept same as requested) --- src/managedcleanroom/HISTORY.rst | 8 +- .../_frontend_custom.py | 10 +- .../analytics_frontend_api/__init__.py | 5 +- .../analytics_frontend_api/_client.py | 30 +- .../analytics_frontend_api/_configuration.py | 31 +- .../analytics_frontend_api/_patch.py | 3 +- .../_utils/serialization.py | 386 ++---- .../analytics_frontend_api/aio/__init__.py | 4 +- .../analytics_frontend_api/aio/_client.py | 21 +- .../aio/_configuration.py | 31 +- .../analytics_frontend_api/aio/_patch.py | 3 +- .../aio/operations/__init__.py | 4 +- .../aio/operations/_operations.py | 510 ++------ .../aio/operations/_patch.py | 3 +- .../operations/__init__.py | 4 +- .../operations/_operations.py | 1135 +++++------------ .../operations/_patch.py | 3 +- .../latest/test_frontend_collaboration.py | 10 +- .../tests/latest/test_frontend_dataset.py | 10 +- .../tests/latest/test_frontend_misc.py | 8 +- 20 files changed, 648 insertions(+), 1571 deletions(-) diff --git a/src/managedcleanroom/HISTORY.rst b/src/managedcleanroom/HISTORY.rst index 10602f91b60..4b0c94cdb68 100644 --- a/src/managedcleanroom/HISTORY.rst +++ b/src/managedcleanroom/HISTORY.rst @@ -5,7 +5,13 @@ Release History 1.0.0b3 +++++++ -* Regenerated analytics_frontend_api SDK from V2026_03_01_Preview OpenAPI specification +* Updated to latest Frontend API spec from develop branch (2026-03-01-preview) +* Regenerated analytics_frontend_api SDK with updated method signatures +* SDK Changes (internal, transparent to CLI users): + - Method renames: collaboration.list → collaboration.list_get + - Method renames: analytics_dataset_* → analytics_datasets_* (dataset → datasets, plural) + - Method renames: check_consent_document_id_get → consent_document_id_get + - Method renames: set_consent_document_id_put → consent_document_id_put * BREAKING CHANGE: All frontend API 
endpoints now require api-version=2026-03-01-preview query parameter * Added: --api-version parameter to all frontend commands (default: 2026-03-01-preview) * Updated: SDK client now automatically injects api-version into all API requests diff --git a/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py b/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py index 40f1a870924..39bf0ca0c7d 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py +++ b/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py @@ -25,7 +25,7 @@ def frontend_collaboration_list(cmd, active_only=False, api_version=None): :return: List of collaboration objects with collaborationId, collaborationName, userStatus """ client = get_frontend_client(cmd, api_version=api_version) - return client.collaboration.list(active_only=active_only) + return client.collaboration.list_get(active_only=active_only) def frontend_collaboration_show( @@ -222,7 +222,7 @@ def frontend_collaboration_dataset_show( :return: Dataset details """ client = get_frontend_client(cmd, api_version=api_version) - return client.collaboration.analytics_dataset_document_id_get( + return client.collaboration.analytics_datasets_document_id_get( collaboration_id, document_id) @@ -244,7 +244,7 @@ def frontend_collaboration_dataset_publish( body = json.loads(body) client = get_frontend_client(cmd, api_version=api_version) - return client.collaboration.analytics_dataset_document_id_publish_post( + return client.collaboration.analytics_datasets_document_id_publish_post( collaboration_id, document_id, body) @@ -281,7 +281,7 @@ def frontend_collaboration_consent_check( :return: Consent status """ client = get_frontend_client(cmd, api_version=api_version) - return client.collaboration.check_consent_document_id_get( + return client.collaboration.consent_document_id_get( collaboration_id, document_id) @@ -300,7 +300,7 @@ def frontend_collaboration_consent_set( """ body = {"consentAction": 
consent_action} client = get_frontend_client(cmd, api_version=api_version) - return client.collaboration.set_consent_document_id_put( + return client.collaboration.consent_document_id_put( collaboration_id, document_id, body=body ) diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/__init__.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/__init__.py index 1ac89280fb8..a2915430dea 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/__init__.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/__init__.py @@ -3,18 +3,19 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------- +# flake8: noqa: F403 # pylint: disable=wrong-import-position from typing import TYPE_CHECKING if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import # noqa: F403 + from ._patch import * # pylint: disable=unused-wildcard-import from ._client import AnalyticsFrontendAPI # type: ignore try: from ._patch import __all__ as _patch_all - from ._patch import * # noqa: F403 + from ._patch import * except ImportError: _patch_all = [] from ._patch import patch_sdk as _patch_sdk diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_client.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_client.py index 1a84e3e8aae..a3ae5a123bd 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_client.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_client.py @@ -38,39 +38,28 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential _policies = kwargs.pop("policies", None) if _policies is None: _policies = [ - policies.RequestIdPolicy( - **kwargs), + policies.RequestIdPolicy(**kwargs), 
self._config.headers_policy, self._config.user_agent_policy, self._config.proxy_policy, - policies.ContentDecodePolicy( - **kwargs), + policies.ContentDecodePolicy(**kwargs), self._config.redirect_policy, self._config.retry_policy, self._config.authentication_policy, self._config.custom_hook_policy, self._config.logging_policy, - policies.DistributedTracingPolicy( - **kwargs), - policies.SensitiveHeaderCleanupPolicy( - **kwargs) if self._config.redirect_policy else None, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, self._config.http_logging_policy, ] - self._client: PipelineClient = PipelineClient( - base_url=endpoint, policies=_policies, **kwargs) + self._client: PipelineClient = PipelineClient(base_url=endpoint, policies=_policies, **kwargs) self._serialize = Serializer() self._deserialize = Deserializer() self._serialize.client_side_validation = False - self.collaboration = CollaborationOperations( - self._client, self._config, self._serialize, self._deserialize) - - def send_request( - self, - request: HttpRequest, - *, - stream: bool = False, - **kwargs: Any) -> HttpResponse: + self.collaboration = CollaborationOperations(self._client, self._config, self._serialize, self._deserialize) + + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. 
>>> from azure.core.rest import HttpRequest @@ -90,8 +79,7 @@ def send_request( request_copy = deepcopy(request) request_copy.url = self._client.format_url(request_copy.url) - return self._client.send_request( - request_copy, stream=stream, **kwargs) # type: ignore + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore def close(self) -> None: self._client.close() diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_configuration.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_configuration.py index 15b0d18a3e3..23dc01e234d 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_configuration.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_configuration.py @@ -3,6 +3,7 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------- +# pylint: disable=too-few-public-methods from typing import Any @@ -11,7 +12,7 @@ VERSION = "unknown" -class AnalyticsFrontendAPIConfiguration: # pylint: disable=too-many-instance-attributes,too-few-public-methods +class AnalyticsFrontendAPIConfiguration: # pylint: disable=too-many-instance-attributes """Configuration for AnalyticsFrontendAPI. 
Note that all parameters used to create this instance are saved as instance @@ -26,27 +27,17 @@ def __init__(self, **kwargs: Any) -> None: api_version: str = kwargs.pop("api_version", "2026-03-01-preview") self.api_version = api_version - kwargs.setdefault( - "sdk_moniker", - "analyticsfrontendapi/{}".format(VERSION)) + kwargs.setdefault("sdk_moniker", "analyticsfrontendapi/{}".format(VERSION)) self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) def _configure(self, **kwargs: Any) -> None: - self.user_agent_policy = kwargs.get( - "user_agent_policy") or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get( - "headers_policy") or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get( - "proxy_policy") or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get( - "logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get( - "http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) - self.custom_hook_policy = kwargs.get( - "custom_hook_policy") or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get( - "redirect_policy") or policies.RedirectPolicy(**kwargs) - self.retry_policy = kwargs.get( - "retry_policy") or policies.RetryPolicy(**kwargs) + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = 
kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_patch.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_patch.py index 2bd950a309c..87676c65a8f 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_patch.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_patch.py @@ -9,8 +9,7 @@ """ -# Add all objects you want publicly available to users at this package level -__all__: list[str] = [] +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/serialization.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/serialization.py index 6d77cffbf80..53fbca493e3 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/serialization.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/_utils/serialization.py @@ -1,5 +1,5 @@ # pylint: disable=line-too-long,useless-suppression,too-many-lines -# pylint: disable=protected-access,broad-exception-caught,multiple-statements +# flake8: noqa: E731 # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -57,8 +57,7 @@ class RawDeserializer: CONTEXT_NAME = "deserialized_data" @classmethod - def deserialize_from_text( - cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: """Decode data according to content-type. Accept a stream of data as well, but will be load at once in memory for now. 
@@ -91,17 +90,14 @@ def deserialize_from_text( try: return json.loads(data_as_str) except ValueError as err: - raise DeserializationError( - "JSON is invalid: {}".format(err), err) from err + raise DeserializationError("JSON is invalid: {}".format(err), err) from err elif "xml" in (content_type or []): try: try: if isinstance(data, unicode): # type: ignore - # If I'm Python 2.7 and unicode XML will scream if I - # try a "fromstring" on unicode string - data_as_str = data_as_str.encode( - encoding="utf-8") # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore except NameError: pass @@ -128,12 +124,10 @@ def _json_attemp(data): raise DeserializationError("XML is invalid") from err elif content_type.startswith("text/"): return data_as_str - raise DeserializationError( - "Cannot deserialize content-type: {}".format(content_type)) + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) @classmethod - def deserialize_from_http_generics( - cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: + def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: """Deserialize from HTTP response. Use bytes and headers to NOT use any requests/aiohttp or whatever @@ -148,8 +142,7 @@ def deserialize_from_http_generics( # Try to use content-type from headers if available content_type = None if "content-type" in headers: - content_type = headers["content-type"].split(";")[ - 0].strip().lower() + content_type = headers["content-type"].split(";")[0].strip().lower() # Ouch, this server did not declare what it sent... # Let's guess it's JSON... 
# Also, since Autorest was considering that an empty body was a valid JSON, @@ -241,15 +234,9 @@ def __init__(self, **kwargs: Any) -> None: self.additional_properties: Optional[dict[str, Any]] = {} for k in kwargs: # pylint: disable=consider-using-dict-items if k not in self._attribute_map: - _LOGGER.warning( - "%s is not a known attribute of class %s and will be ignored", - k, - self.__class__) + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) elif k in self._validation and self._validation[k].get("readonly", False): - _LOGGER.warning( - "Readonly attribute %s will be ignored in class %s", - k, - self.__class__) + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) else: setattr(self, k, kwargs[k]) @@ -278,8 +265,7 @@ def __str__(self) -> str: @classmethod def enable_additional_properties_sending(cls) -> None: - cls._attribute_map["additional_properties"] = { - "key": "", "type": "{object}"} + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} @classmethod def is_xml_model(cls) -> bool: @@ -301,11 +287,7 @@ def _create_xml_node(cls): except AttributeError: xml_map = {} - return _create_xml_node( - xml_map.get( - "name", cls.__name__), xml_map.get( - "prefix", None), xml_map.get( - "ns", None)) + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: """Return the JSON that would be sent to server from this model. 
@@ -370,24 +352,16 @@ def _infer_class_models(cls): try: str_models = cls.__module__.rsplit(".", 1)[0] models = sys.modules[str_models] - client_models = { - k: v for k, - v in models.__dict__.items() if isinstance( - v, - type)} + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} if cls.__name__ not in client_models: raise ValueError("Not Autorest generated code") except Exception: # pylint: disable=broad-exception-caught - # Assume it's not Autorest generated (tests?). Add ourselves as - # dependencies. + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. client_models = {cls.__name__: cls} return client_models @classmethod - def deserialize( - cls, - data: Any, - content_type: Optional[str] = None) -> Self: + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: """Parse a str using the RestAPI syntax and return a model. :param str data: A str using RestAPI structure. JSON by default. @@ -397,10 +371,7 @@ def deserialize( :rtype: Self """ deserializer = Deserializer(cls._infer_class_models()) - return deserializer( - cls.__name__, - data, - content_type=content_type) # type: ignore + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @classmethod def from_dict( @@ -432,10 +403,7 @@ def from_dict( if key_extractors is None else key_extractors ) - return deserializer( - cls.__name__, - data, - content_type=content_type) # type: ignore + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @classmethod def _flatten_subtype(cls, key, objects): @@ -443,8 +411,7 @@ def _flatten_subtype(cls, key, objects): return {} result = dict(cls._subtype_map[key]) for valuetype in cls._subtype_map[key].values(): - result |= objects[valuetype]._flatten_subtype( - key, objects) # pylint: disable=protected-access + result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access return result @classmethod @@ 
-461,24 +428,18 @@ def _classify(cls, response, objects): subtype_value = None if not isinstance(response, ET.Element): - rest_api_response_key = cls._get_rest_key_parts( - subtype_key)[-1] - subtype_value = response.get( - rest_api_response_key, None) or response.get( - subtype_key, None) + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) else: - subtype_value = xml_key_extractor( - subtype_key, cls._attribute_map[subtype_key], response) + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) if subtype_value: # Try to match base class. Can be class name only # (bug to fix in Autorest to support x-ms-discriminator-name) if cls.__name__ == subtype_value: return cls - flatten_mapping_type = cls._flatten_subtype( - subtype_key, objects) + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) try: - # type: ignore - return objects[flatten_mapping_type[subtype_value]] + return objects[flatten_mapping_type[subtype_value]] # type: ignore except KeyError: _LOGGER.warning( "Subtype value %s has no mapping, use base class %s.", @@ -487,10 +448,7 @@ def _classify(cls, response, objects): ) break else: - _LOGGER.warning( - "Discriminator %s is absent or null, use base class %s.", - subtype_key, - cls.__name__) + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) break return cls @@ -502,8 +460,7 @@ def _get_rest_key_parts(cls, attr_key): :rtype: list """ rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) - return [_decode_attribute_map_key(key_part) - for key_part in rest_split_key] + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] def _decode_attribute_map_key(key): @@ -523,14 +480,7 @@ class Serializer: # pylint: disable=too-many-public-methods basic_types = {str: "str", int: "int", bool: "bool", float: "float"} 
_xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} - days = { - 0: "Mon", - 1: "Tue", - 2: "Wed", - 3: "Thu", - 4: "Fri", - 5: "Sat", - 6: "Sun"} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} months = { 1: "Jan", 2: "Feb", @@ -610,8 +560,7 @@ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, to try: is_xml_model_serialization = kwargs["is_xml"] except KeyError: - is_xml_model_serialization = kwargs.setdefault( - "is_xml", target_obj.is_xml_model()) + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) serialized = {} if is_xml_model_serialization: @@ -635,13 +584,11 @@ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, to if is_xml_model_serialization: pass # Don't provide "transformer" for XML for now. Keep "orig_attr" else: # JSON - keys, orig_attr = key_transformer( - attr, attr_desc.copy(), orig_attr) + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) keys = keys if isinstance(keys, list) else [keys] kwargs["serialization_ctxt"] = attr_desc - new_attr = self.serialize_data( - orig_attr, attr_desc["type"], **kwargs) + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) if is_xml_model_serialization: xml_desc = attr_desc.get("xml", {}) @@ -661,21 +608,17 @@ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, to serialized.extend(new_attr) # type: ignore elif isinstance(new_attr, ET.Element): # If the down XML has no XML/Name, - # we MUST replace the tag with the local tag. But - # keeping the namespaces. - if "name" not in getattr( - orig_attr, "_xml_map", {}): + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): splitted_tag = new_attr.tag.split("}") if len(splitted_tag) == 2: # Namespace - new_attr.tag = "}".join( - [splitted_tag[0], xml_name]) + new_attr.tag = "}".join([splitted_tag[0], xml_name]) else: new_attr.tag = xml_name serialized.append(new_attr) # type: ignore else: # That's a basic type # Integrate namespace if necessary - local_node = _create_xml_node( - xml_name, xml_prefix, xml_ns) + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) local_node.text = str(new_attr) serialized.append(local_node) # type: ignore else: # JSON @@ -694,8 +637,7 @@ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, to raise except (AttributeError, KeyError, TypeError) as err: - msg = "Attribute {} in object {} cannot be serialized.\n{}".format( - attr_name, class_name, str(target_obj)) + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) raise SerializationError(msg) from err return serialized @@ -712,14 +654,12 @@ def body(self, data, data_type, **kwargs): # Just in case this is a dict internal_data_type_str = data_type.strip("[]{}") - internal_data_type = self.dependencies.get( - internal_data_type_str, None) + internal_data_type = self.dependencies.get(internal_data_type_str, None) try: is_xml_model_serialization = kwargs["is_xml"] except KeyError: if internal_data_type and issubclass(internal_data_type, Model): - is_xml_model_serialization = kwargs.setdefault( - "is_xml", internal_data_type.is_xml_model()) + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) else: is_xml_model_serialization = False if internal_data_type and not isinstance(internal_data_type, Enum): @@ -738,11 +678,9 @@ def body(self, data, data_type, **kwargs): attribute_key_case_insensitive_extractor, last_rest_key_case_insensitive_extractor, ] - data = deserializer._deserialize( - data_type, data) # pylint: 
disable=protected-access + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access except DeserializationError as err: - raise SerializationError( - "Unable to build a model: " + str(err)) from err + raise SerializationError("Unable to build a model: " + str(err)) from err return self._serialize(data, data_type, **kwargs) @@ -764,14 +702,11 @@ def url(self, name, data, data_type, **kwargs): if kwargs.get("skip_quote") is True: output = str(output) - output = output.replace( - "{", quote("{")).replace("}", quote("}")) + output = output.replace("{", quote("{")).replace("}", quote("}")) else: output = quote(str(output), safe="") except SerializationError as exc: - raise TypeError( - "{} must be type {}.".format( - name, data_type)) from exc + raise TypeError("{} must be type {}.".format(name, data_type)) from exc return output def query(self, name, data, data_type, **kwargs): @@ -786,13 +721,11 @@ def query(self, name, data, data_type, **kwargs): :returns: The serialized query parameter """ try: - # Treat the list aside, since we don't want to encode the div - # separator + # Treat the list aside, since we don't want to encode the div separator if data_type.startswith("["): internal_data_type = data_type[1:-1] do_quote = not kwargs.get("skip_quote", False) - return self.serialize_iter( - data, internal_data_type, do_quote=do_quote, **kwargs) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) # Not a list, regular serialization output = self.serialize_data(data, data_type, **kwargs) @@ -803,9 +736,7 @@ def query(self, name, data, data_type, **kwargs): else: output = quote(str(output), safe="") except SerializationError as exc: - raise TypeError( - "{} must be type {}.".format( - name, data_type)) from exc + raise TypeError("{} must be type {}.".format(name, data_type)) from exc return str(output) def header(self, name, data, data_type, **kwargs): @@ -827,9 +758,7 @@ def header(self, name, data, data_type, 
**kwargs): if data_type == "bool": output = json.dumps(output) except SerializationError as exc: - raise TypeError( - "{} must be type {}.".format( - name, data_type)) from exc + raise TypeError("{} must be type {}.".format(name, data_type)) from exc return str(output) def serialize_data(self, data, data_type, **kwargs): @@ -857,15 +786,13 @@ def serialize_data(self, data, data_type, **kwargs): # If dependencies is empty, try with current data class # It has to be a subclass of Enum anyway - enum_type = self.dependencies.get( - data_type, cast(type, data.__class__)) + enum_type = self.dependencies.get(data_type, cast(type, data.__class__)) if issubclass(enum_type, Enum): return Serializer.serialize_enum(data, enum_obj=enum_type) iter_type = data_type[0] + data_type[-1] if iter_type in self.serialize_type: - return self.serialize_type[iter_type]( - data, data_type[1:-1], **kwargs) + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) except (ValueError, TypeError) as err: msg = "Unable to serialize value: {!r} as type: {!r}." 
@@ -874,8 +801,7 @@ def serialize_data(self, data, data_type, **kwargs): @classmethod def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements - custom_serializer = kwargs.get( - "basic_types_serializers", {}).get(data_type) + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) if custom_serializer: return custom_serializer if kwargs.get("is_xml", False): @@ -964,9 +890,7 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): serialized.append(None) if kwargs.get("do_quote", False): - serialized = [ - "" if s is None else quote( - str(s), safe="") for s in serialized] + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] if div: serialized = ["" if s is None else str(s) for s in serialized] @@ -979,15 +903,11 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): if not xml_name: xml_name = serialization_ctxt["key"] - # Create a wrap node if necessary (use the fact that Element and - # list have "append") + # Create a wrap node if necessary (use the fact that Element and list have "append") is_wrapped = xml_desc.get("wrapped", False) node_name = xml_desc.get("itemsName", xml_name) if is_wrapped: - final_result = _create_xml_node( - xml_name, xml_desc.get( - "prefix", None), xml_desc.get( - "ns", None)) + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) else: final_result = [] # All list elements to "local_node" @@ -995,10 +915,7 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): if isinstance(el, ET.Element): el_node = el else: - el_node = _create_xml_node( - node_name, xml_desc.get( - "prefix", None), xml_desc.get( - "ns", None)) + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) if el is not None: # Otherwise it writes "None" :-p el_node.text = str(el) final_result.append(el_node) @@ -1017,8 +934,7 @@ def serialize_dict(self, attr, 
dict_type, **kwargs): serialized = {} for key, value in attr.items(): try: - serialized[self.serialize_unicode(key)] = self.serialize_data( - value, dict_type, **kwargs) + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) except ValueError as err: if isinstance(err, SerializationError): raise @@ -1029,10 +945,7 @@ def serialize_dict(self, attr, dict_type, **kwargs): xml_desc = serialization_ctxt["xml"] xml_name = xml_desc["name"] - final_result = _create_xml_node( - xml_name, xml_desc.get( - "prefix", None), xml_desc.get( - "ns", None)) + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) for key, value in serialized.items(): ET.SubElement(final_result, key).text = value return final_result @@ -1055,8 +968,7 @@ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-s return attr obj_type = type(attr) if obj_type in self.basic_types: - return self.serialize_basic( - attr, self.basic_types[obj_type], **kwargs) + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) if obj_type is _long_type: return self.serialize_long(attr) if obj_type is str: @@ -1080,8 +992,7 @@ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-s serialized = {} for key, value in attr.items(): try: - serialized[self.serialize_unicode( - key)] = self.serialize_object(value, **kwargs) + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) except ValueError: serialized[self.serialize_unicode(key)] = None return serialized @@ -1204,12 +1115,10 @@ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument """ try: if not attr.tzinfo: - _LOGGER.warning( - "Datetime with no tzinfo will be considered UTC.") + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") utc = attr.utctimetuple() except AttributeError as exc: - raise TypeError( - "RFC1123 object must be valid Datetime object.") from exc + raise 
TypeError("RFC1123 object must be valid Datetime object.") from exc return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( Serializer.days[utc.tm_wday], @@ -1234,22 +1143,17 @@ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument attr = isodate.parse_datetime(attr) try: if not attr.tzinfo: - _LOGGER.warning( - "Datetime with no tzinfo will be considered UTC.") + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") utc = attr.utctimetuple() if utc.tm_year > 9999 or utc.tm_year < 1: raise OverflowError("Hit max or min date") - microseconds = str( - attr.microsecond).rjust( - 6, - "0").rstrip("0").ljust( - 3, - "0") + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") if microseconds: microseconds = "." + microseconds date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( - utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec) + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) return date + microseconds + "Z" except (ValueError, OverflowError) as err: msg = "Unable to serialize datetime object." @@ -1272,12 +1176,10 @@ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument return attr try: if not attr.tzinfo: - _LOGGER.warning( - "Datetime with no tzinfo will be considered UTC.") + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") return int(calendar.timegm(attr.utctimetuple())) except AttributeError as exc: - raise TypeError( - "Unix time object must be valid Datetime object.") from exc + raise TypeError("Unix time object must be valid Datetime object.") from exc def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument @@ -1285,8 +1187,7 @@ def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argumen working_data = data while "." 
in key: - # Need the cast, as for some reasons "split" is typed as list[str | - # Any] + # Need the cast, as for some reasons "split" is typed as list[str | Any] dict_keys = cast(list[str], _FLATTEN.split(key)) if len(dict_keys) == 1: key = _decode_attribute_map_key(dict_keys[0]) @@ -1314,8 +1215,7 @@ def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inc key = _decode_attribute_map_key(dict_keys[0]) break working_key = _decode_attribute_map_key(dict_keys[0]) - working_data = attribute_key_case_insensitive_extractor( - working_key, None, working_data) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) if working_data is None: # If at any point while following flatten JSON path see None, it means # that all properties under are None as well @@ -1323,8 +1223,7 @@ def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inc key = ".".join(dict_keys[1:]) if working_data: - return attribute_key_case_insensitive_extractor( - key, None, working_data) + return attribute_key_case_insensitive_extractor(key, None, working_data) def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument @@ -1420,11 +1319,7 @@ def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument # - Wrapped node # - Internal type is an enum (considered basic types) # - Internal type has no XML/Name node - if is_wrapped or ( - internal_type and ( - issubclass( - internal_type, - Enum) or "name" not in internal_type_xml_map)): + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): children = data.findall(xml_name) # If internal type has a local name and it's not a list, I use that name elif not is_iter_type and internal_type and "name" in internal_type_xml_map: @@ -1460,8 +1355,7 @@ def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument # Here it's not a itertype, we should have found one element 
only or empty if len(children) > 1: - raise DeserializationError( - "Find several XML '{}' where it was not expected".format(xml_name)) + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) return children[0] @@ -1474,8 +1368,7 @@ class Deserializer: basic_types = {str: "str", int: "int", bool: "bool", float: "float"} - valid_date = re.compile( - r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: self.deserialize_type = { @@ -1533,15 +1426,9 @@ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return """ # This is already a model, go recursive just in case if hasattr(data, "_attribute_map"): - constants = [ - name for name, - config in getattr( - data, - "_validation", - {}).items() if config.get("constant")] + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] try: - for attr, mapconfig in data._attribute_map.items( - ): # pylint: disable=protected-access + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access if attr in constants: continue value = getattr(data, attr) @@ -1549,8 +1436,7 @@ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return continue local_type = mapconfig["type"] internal_data_type = local_type.strip("[]{}") - if internal_data_type not in self.dependencies or isinstance( - internal_data_type, Enum): + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): continue setattr(data, attr, self._deserialize(local_type, value)) return data @@ -1567,12 +1453,10 @@ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return if data is None or data is CoreNull: return data try: - # type: ignore # pylint: disable=protected-access - 
attributes = response._attribute_map + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access d_attrs = {} for attr, attr_desc in attributes.items(): - # Check empty string. If it's not empty, someone has a real - # "additionalProperties"... + # Check empty string. If it's not empty, someone has a real "additionalProperties"... if attr == "additional_properties" and attr_desc["key"] == "": continue raw_value = None @@ -1588,9 +1472,9 @@ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return if raw_value is not None and raw_value != found_value: msg = ( "Ignoring extracted value '%s' from %s for key '%s'" - " (duplicate extraction, follow extractors order)") - _LOGGER.warning( - msg, found_value, key_extractor, attr) + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) continue raw_value = found_value @@ -1599,18 +1483,14 @@ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return except (AttributeError, TypeError, KeyError) as err: msg = "Unable to deserialize to object: " + class_name # type: ignore raise DeserializationError(msg) from err - additional_properties = self._build_additional_properties( - attributes, data) - return self._instantiate_model( - response, d_attrs, additional_properties) + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) def _build_additional_properties(self, attribute_map, data): if not self.additional_properties_detection: return None - if "additional_properties" in attribute_map and attribute_map.get( - "additional_properties", {}).get("key") != "": - # Check empty string. If it's not empty, someone has a real - # "additionalProperties" + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. 
If it's not empty, someone has a real "additionalProperties" return None if isinstance(data, ET.Element): data = {el.tag: el.text for el in data} @@ -1644,8 +1524,7 @@ def _classify_target(self, target, data): return target, target try: - # type: ignore # pylint: disable=protected-access - target = target._classify(data, self.dependencies) + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access except AttributeError: pass # Target is not a Model, no classify return target, target.__class__.__name__ # type: ignore @@ -1665,10 +1544,10 @@ def failsafe_deserialize(self, target_obj, data, content_type=None): """ try: return self(target_obj, data, content_type=content_type) - except BaseException: # pylint: disable=bare-except + except: # pylint: disable=bare-except _LOGGER.debug( - "Ran into a deserialization error. Ignoring since this is failsafe deserialization", - exc_info=True) + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) return None @staticmethod @@ -1690,30 +1569,23 @@ def _unpack_content(raw_data, content_type=None): :rtype: object :return: Unpacked content. 
""" - # Assume this is enough to detect a Pipeline Response without importing - # it + # Assume this is enough to detect a Pipeline Response without importing it context = getattr(raw_data, "context", {}) if context: if RawDeserializer.CONTEXT_NAME in context: return context[RawDeserializer.CONTEXT_NAME] - raise ValueError( - "This pipeline didn't have the RawDeserializer policy; can't deserialize") + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") - # Assume this is enough to recognize universal_http.ClientResponse - # without importing it + # Assume this is enough to recognize universal_http.ClientResponse without importing it if hasattr(raw_data, "body"): - return RawDeserializer.deserialize_from_http_generics( - raw_data.text(), raw_data.headers) + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) - # Assume this enough to recognize requests.Response without importing - # it. + # Assume this enough to recognize requests.Response without importing it. 
if hasattr(raw_data, "_content_consumed"): - return RawDeserializer.deserialize_from_http_generics( - raw_data.text, raw_data.headers) + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): - return RawDeserializer.deserialize_from_text( - raw_data, content_type) # type: ignore + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore return raw_data def _instantiate_model(self, response, attrs, additional_properties=None): @@ -1738,10 +1610,7 @@ def _instantiate_model(self, response, attrs, additional_properties=None): for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore if v.get("constant") ] - kwargs = { - k: v for k, - v in attrs.items() if k not in subtype and k not in readonly + - const} + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} response_obj = response(**kwargs) for attr in readonly: setattr(response_obj, attr, attrs.get(attr)) @@ -1749,8 +1618,7 @@ def _instantiate_model(self, response, attrs, additional_properties=None): response_obj.additional_properties = additional_properties # type: ignore return response_obj except TypeError as err: - msg = "Unable to deserialize {} into model {}. ".format( - kwargs, response) # type: ignore + msg = "Unable to deserialize {} into model {}. 
".format(kwargs, response) # type: ignore raise DeserializationError(msg + str(err)) from err else: try: @@ -1780,21 +1648,15 @@ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return- if data_type in self.basic_types.values(): return self.deserialize_basic(data, data_type) if data_type in self.deserialize_type: - if isinstance( - data, - self.deserialize_expected_types.get( - data_type, - tuple())): + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): return data - def is_a_text_parsing_type(x): # pylint: disable=unnecessary-lambda-assignment - return x not in [ - "object", - "[]", - r"{}", - ] - if isinstance(data, ET.Element) and is_a_text_parsing_type( - data_type) and not data.text: + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: return None data_val = self.deserialize_type[data_type](data) return data_val @@ -1825,14 +1687,10 @@ def deserialize_iter(self, attr, iter_type): """ if attr is None: return None - if isinstance( - attr, - ET.Element): # If I receive an element here, get the children + if isinstance(attr, ET.Element): # If I receive an element here, get the children attr = list(attr) if not isinstance(attr, (list, set)): - raise DeserializationError( - "Cannot deserialize as [{}] an object of type {}".format( - iter_type, type(attr))) + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) return [self.deserialize_data(a, iter_type) for a in attr] def deserialize_dict(self, attr, dict_type): @@ -1845,16 +1703,12 @@ def deserialize_dict(self, attr, dict_type): :rtype: dict """ if isinstance(attr, list): - return { - x["key"]: self.deserialize_data( - x["value"], - dict_type) for x in attr} + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} if 
isinstance(attr, ET.Element): # Transform value into {"Key": "value"} attr = {el.tag: el.text for el in attr} - return {k: self.deserialize_data(v, dict_type) - for k, v in attr.items()} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements """Deserialize a generic object. @@ -1882,8 +1736,7 @@ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return deserialized = {} for key, value in attr.items(): try: - deserialized[key] = self.deserialize_object( - value, **kwargs) + deserialized[key] = self.deserialize_object(value, **kwargs) except ValueError: deserialized[key] = None return deserialized @@ -1987,20 +1840,15 @@ def deserialize_enum(data, enum_obj): return list(enum_obj.__members__.values())[data] except IndexError as exc: error = "{!r} is not a valid index for enum {!r}" - raise DeserializationError( - error.format(data, enum_obj)) from exc + raise DeserializationError(error.format(data, enum_obj)) from exc try: return enum_obj(str(data)) except ValueError: for enum_value in enum_obj: if enum_value.value.lower() == str(data).lower(): return enum_value - # We don't fail anymore for unknown value, we deserialize as a - # string - _LOGGER.warning( - "Deserializer is not able to find %s as valid enum in %s", - data, - enum_obj) + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) return Deserializer.deserialize_unicode(data) @staticmethod @@ -2092,11 +1940,8 @@ def deserialize_date(attr): if isinstance(attr, ET.Element): attr = attr.text if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore - raise DeserializationError( - "Date must have only digits and -. Received: %s" % - attr) - # This must NOT use defaultmonth/defaultday. Using None ensure this - # raises an exception. 
+ raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. return isodate.parse_date(attr, defaultmonth=0, defaultday=0) @staticmethod @@ -2111,9 +1956,7 @@ def deserialize_time(attr): if isinstance(attr, ET.Element): attr = attr.text if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore - raise DeserializationError( - "Date must have only digits and -. Received: %s" % - attr) + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) return isodate.parse_time(attr) @staticmethod @@ -2129,8 +1972,9 @@ def deserialize_rfc(attr): attr = attr.text try: parsed_date = email.utils.parsedate_tz(attr) # type: ignore - date_obj = datetime.datetime(*parsed_date[:6], tzinfo=datetime.timezone( - datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))) + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) if not date_obj.tzinfo: date_obj = date_obj.astimezone(tz=TZ_UTC) except ValueError as err: diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/__init__.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/__init__.py index e3229dff489..a2915430dea 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/__init__.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/__init__.py @@ -3,17 +3,19 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. 
# -------------------------------------------------------------------------- +# flake8: noqa: F403 # pylint: disable=wrong-import-position from typing import TYPE_CHECKING if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import # noqa: F403 + from ._patch import * # pylint: disable=unused-wildcard-import from ._client import AnalyticsFrontendAPI # type: ignore try: from ._patch import __all__ as _patch_all + from ._patch import * except ImportError: _patch_all = [] from ._patch import patch_sdk as _patch_sdk diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_client.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_client.py index eeb74e28e1c..efbdf666ab9 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_client.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_client.py @@ -38,32 +38,26 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential _policies = kwargs.pop("policies", None) if _policies is None: _policies = [ - policies.RequestIdPolicy( - **kwargs), + policies.RequestIdPolicy(**kwargs), self._config.headers_policy, self._config.user_agent_policy, self._config.proxy_policy, - policies.ContentDecodePolicy( - **kwargs), + policies.ContentDecodePolicy(**kwargs), self._config.redirect_policy, self._config.retry_policy, self._config.authentication_policy, self._config.custom_hook_policy, self._config.logging_policy, - policies.DistributedTracingPolicy( - **kwargs), - policies.SensitiveHeaderCleanupPolicy( - **kwargs) if self._config.redirect_policy else None, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, self._config.http_logging_policy, ] - self._client: AsyncPipelineClient = AsyncPipelineClient( - base_url=endpoint, policies=_policies, **kwargs) + self._client: AsyncPipelineClient = 
AsyncPipelineClient(base_url=endpoint, policies=_policies, **kwargs) self._serialize = Serializer() self._deserialize = Deserializer() self._serialize.client_side_validation = False - self.collaboration = CollaborationOperations( - self._client, self._config, self._serialize, self._deserialize) + self.collaboration = CollaborationOperations(self._client, self._config, self._serialize, self._deserialize) def send_request( self, request: HttpRequest, *, stream: bool = False, **kwargs: Any @@ -87,8 +81,7 @@ def send_request( request_copy = deepcopy(request) request_copy.url = self._client.format_url(request_copy.url) - return self._client.send_request( - request_copy, stream=stream, **kwargs) # type: ignore + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore async def close(self) -> None: await self._client.close() diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_configuration.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_configuration.py index 6ce8e57c748..57d5d6b89bb 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_configuration.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_configuration.py @@ -3,6 +3,7 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------- +# pylint: disable=too-few-public-methods from typing import Any @@ -11,7 +12,7 @@ VERSION = "unknown" -class AnalyticsFrontendAPIConfiguration: # pylint: disable=too-many-instance-attributes,too-few-public-methods +class AnalyticsFrontendAPIConfiguration: # pylint: disable=too-many-instance-attributes """Configuration for AnalyticsFrontendAPI. 
Note that all parameters used to create this instance are saved as instance @@ -26,27 +27,17 @@ def __init__(self, **kwargs: Any) -> None: api_version: str = kwargs.pop("api_version", "2026-03-01-preview") self.api_version = api_version - kwargs.setdefault( - "sdk_moniker", - "analyticsfrontendapi/{}".format(VERSION)) + kwargs.setdefault("sdk_moniker", "analyticsfrontendapi/{}".format(VERSION)) self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) def _configure(self, **kwargs: Any) -> None: - self.user_agent_policy = kwargs.get( - "user_agent_policy") or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get( - "headers_policy") or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get( - "proxy_policy") or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get( - "logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get( - "http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) - self.custom_hook_policy = kwargs.get( - "custom_hook_policy") or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get( - "redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) - self.retry_policy = kwargs.get( - "retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy 
= kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_patch.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_patch.py index 2bd950a309c..87676c65a8f 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_patch.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/_patch.py @@ -9,8 +9,7 @@ """ -# Add all objects you want publicly available to users at this package level -__all__: list[str] = [] +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/__init__.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/__init__.py index 91ecd54c4e2..5a4c57889d9 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/__init__.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/__init__.py @@ -3,16 +3,18 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. 
# -------------------------------------------------------------------------- +# flake8: noqa: F403 # pylint: disable=wrong-import-position from typing import TYPE_CHECKING if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import # noqa: F403 + from ._patch import * # pylint: disable=unused-wildcard-import from ._operations import CollaborationOperations # type: ignore from ._patch import __all__ as _patch_all +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_operations.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_operations.py index b7ff58f9fba..e3ec7f77050 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_operations.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_operations.py @@ -26,38 +26,36 @@ from ...operations._operations import ( build_collaboration_analytics_auditevents_get_request, build_collaboration_analytics_cleanroompolicy_get_request, - build_collaboration_analytics_dataset_document_id_get_request, - build_collaboration_analytics_dataset_document_id_publish_post_request, + build_collaboration_analytics_datasets_document_id_get_request, + build_collaboration_analytics_datasets_document_id_publish_post_request, build_collaboration_analytics_datasets_document_id_queries_get_request, build_collaboration_analytics_datasets_list_get_request, build_collaboration_analytics_get_request, build_collaboration_analytics_queries_document_id_get_request, build_collaboration_analytics_queries_document_id_publish_post_request, build_collaboration_analytics_queries_document_id_run_post_request, - build_collaboration_analytics_queries_document_id_runhistory_get_request, + build_collaboration_analytics_queries_document_id_runs_get_request, 
build_collaboration_analytics_queries_document_id_vote_post_request, - build_collaboration_analytics_queries_jobid_get_request, build_collaboration_analytics_queries_list_get_request, + build_collaboration_analytics_runs_job_id_get_request, build_collaboration_analytics_secrets_secret_name_put_request, - build_collaboration_check_consent_document_id_get_request, + build_collaboration_consent_document_id_get_request, + build_collaboration_consent_document_id_put_request, build_collaboration_id_get_request, build_collaboration_invitation_id_accept_post_request, build_collaboration_invitation_id_get_request, build_collaboration_invitations_get_request, - build_collaboration_list_request, + build_collaboration_list_get_request, build_collaboration_oidc_issuer_info_get_request, build_collaboration_oidc_keys_get_request, build_collaboration_oidc_set_issuer_url_post_request, build_collaboration_report_get_request, - build_collaboration_set_consent_document_id_put_request, ) from .._configuration import AnalyticsFrontendAPIConfiguration JSON = MutableMapping[str, Any] T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, - AsyncHttpResponse], T, dict[str, Any]], Any]] -List = list +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] class CollaborationOperations: # pylint: disable=too-many-public-methods @@ -72,21 +70,13 @@ class CollaborationOperations: # pylint: disable=too-many-public-methods def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client: AsyncPipelineClient = input_args.pop( - 0) if input_args else kwargs.pop("client") - self._config: AnalyticsFrontendAPIConfiguration = input_args.pop( - 0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop( - 0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop( - 0) if input_args else kwargs.pop("deserializer") + self._client: 
AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AnalyticsFrontendAPIConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def list( - self, - *, - active_only: bool = False, - **kwargs: Any) -> List[JSON]: + async def list_get(self, *, active_only: bool = False, **kwargs: Any) -> list[JSON]: """List all collaborations. List all collaborations. @@ -121,9 +111,9 @@ async def list( _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[JSON]] = kwargs.pop("cls", None) + cls: ClsType[list[JSON]] = kwargs.pop("cls", None) - _request = build_collaboration_list_request( + _request = build_collaboration_list_get_request( active_only=active_only, api_version=self._config.api_version, headers=_headers, @@ -139,10 +129,7 @@ async def list( response = pipeline_response.http_response if response.status_code not in [200]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -151,22 +138,12 @@ async def list( deserialized = None if cls: - return cls( - pipeline_response, - cast( - List[JSON], - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(list[JSON], deserialized), {}) # type: ignore - return cast(List[JSON], deserialized) # type: ignore + return cast(list[JSON], deserialized) # type: ignore @distributed_trace_async - async def id_get( - self, - collaboration_id: str, - *, - active_only: bool = False, - **kwargs: Any) -> JSON: + async def id_get(self, collaboration_id: str, *, active_only: bool = False, **kwargs: Any) -> 
JSON: """Get collaboration by id. Get collaboration by id. @@ -228,10 +205,7 @@ async def id_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -240,12 +214,7 @@ async def id_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @@ -338,10 +307,7 @@ async def report_get(self, collaboration_id: str, **kwargs: Any) -> JSON: response = pipeline_response.http_response if response.status_code not in [200, 400, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -350,20 +316,12 @@ async def report_get(self, collaboration_id: str, **kwargs: Any) -> JSON: deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def analytics_get( - self, - collaboration_id: str, - **kwargs: Any) -> JSON: + async def analytics_get(self, collaboration_id: str, **kwargs: Any) -> JSON: """Get collaboration analytics workload. Get collaboration analytics workload. 
@@ -422,10 +380,7 @@ async def analytics_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -434,18 +389,12 @@ async def analytics_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def analytics_cleanroompolicy_get( - self, collaboration_id: str, **kwargs: Any) -> JSON: + async def analytics_cleanroompolicy_get(self, collaboration_id: str, **kwargs: Any) -> JSON: """Get collaboration analytics cleanroompolicy. Get collaboration analytics cleanroompolicy. @@ -508,10 +457,7 @@ async def analytics_cleanroompolicy_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -520,23 +466,15 @@ async def analytics_cleanroompolicy_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def oidc_issuer_info_get( - self, - collaboration_id: str, - **kwargs: Any) -> JSON: - """Get collaboration oidcissuer. + async def oidc_issuer_info_get(self, collaboration_id: str, **kwargs: Any) -> JSON: + """Get collaboration OIDC issuer info. - Get collaboration oidcissuer. 
+ Get collaboration OIDC issuer info. :param collaboration_id: Required. :type collaboration_id: str @@ -594,10 +532,7 @@ async def oidc_issuer_info_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -606,12 +541,7 @@ async def oidc_issuer_info_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @@ -705,11 +635,9 @@ async def oidc_set_issuer_url_post( """ @distributed_trace_async - async def oidc_set_issuer_url_post(self, - collaboration_id: str, - body: Optional[Union[JSON, - IO[bytes]]] = None, - **kwargs: Any) -> JSON: + async def oidc_set_issuer_url_post( + self, collaboration_id: str, body: Optional[Union[JSON, IO[bytes]]] = None, **kwargs: Any + ) -> JSON: """Set collaboration oidc issuer url. Set collaboration oidc issuer url. 
@@ -755,8 +683,7 @@ async def oidc_set_issuer_url_post(self, _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) @@ -790,10 +717,7 @@ async def oidc_set_issuer_url_post(self, response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -802,20 +726,12 @@ async def oidc_set_issuer_url_post(self, deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def oidc_keys_get( - self, - collaboration_id: str, - **kwargs: Any) -> JSON: + async def oidc_keys_get(self, collaboration_id: str, **kwargs: Any) -> JSON: """Get collaboration oidc signing keys (JWKS). Get collaboration oidc signing keys (JWKS). 
@@ -885,10 +801,7 @@ async def oidc_keys_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -897,22 +810,12 @@ async def oidc_keys_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def invitations_get( - self, - collaboration_id: str, - *, - pending_only: bool = False, - **kwargs: Any) -> JSON: + async def invitations_get(self, collaboration_id: str, *, pending_only: bool = False, **kwargs: Any) -> JSON: """List all invitations. List all invitations. @@ -974,10 +877,7 @@ async def invitations_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -986,21 +886,12 @@ async def invitations_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def invitation_id_get( - self, - collaboration_id: str, - invitation_id: str, - **kwargs: Any) -> JSON: + async def invitation_id_get(self, collaboration_id: str, invitation_id: str, **kwargs: Any) -> JSON: """Get invitation by id. Get invitation by id. 
@@ -1067,10 +958,7 @@ async def invitation_id_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -1079,12 +967,7 @@ async def invitation_id_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @@ -1146,10 +1029,7 @@ async def invitation_id_accept_post( response = pipeline_response.http_response if response.status_code not in [204, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) deserialized = None @@ -1165,8 +1045,7 @@ async def invitation_id_accept_post( return deserialized # type: ignore @distributed_trace_async - async def analytics_datasets_list_get( - self, collaboration_id: str, **kwargs: Any) -> JSON: + async def analytics_datasets_list_get(self, collaboration_id: str, **kwargs: Any) -> JSON: """List all datasets. List all datasets. 
@@ -1229,10 +1108,7 @@ async def analytics_datasets_list_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -1241,21 +1117,12 @@ async def analytics_datasets_list_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def analytics_dataset_document_id_get( - self, - collaboration_id: str, - document_id: str, - **kwargs: Any) -> JSON: + async def analytics_datasets_document_id_get(self, collaboration_id: str, document_id: str, **kwargs: Any) -> JSON: """Get dataset by id. Get dataset by id. 
@@ -1341,7 +1208,7 @@ async def analytics_dataset_document_id_get( cls: ClsType[JSON] = kwargs.pop("cls", None) - _request = build_collaboration_analytics_dataset_document_id_get_request( + _request = build_collaboration_analytics_datasets_document_id_get_request( collaboration_id=collaboration_id, document_id=document_id, api_version=self._config.api_version, @@ -1358,10 +1225,7 @@ async def analytics_dataset_document_id_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -1370,17 +1234,12 @@ async def analytics_dataset_document_id_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @overload - async def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long + async def analytics_datasets_document_id_publish_post( # pylint: disable=name-too-long self, collaboration_id: str, document_id: str, @@ -1463,7 +1322,7 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to """ @overload - async def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long + async def analytics_datasets_document_id_publish_post( # pylint: disable=name-too-long self, collaboration_id: str, document_id: str, @@ -1503,7 +1362,7 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to """ @distributed_trace_async - async def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long + async def analytics_datasets_document_id_publish_post( # pylint: disable=name-too-long self, collaboration_id: str, document_id: str, 
body: Union[JSON, IO[bytes]], **kwargs: Any ) -> Optional[JSON]: """Publish dataset by id. @@ -1586,8 +1445,7 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) content_type = content_type or "application/json" @@ -1598,7 +1456,7 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to else: _json = body - _request = build_collaboration_analytics_dataset_document_id_publish_post_request( + _request = build_collaboration_analytics_datasets_document_id_publish_post_request( collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, @@ -1618,10 +1476,7 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to response = pipeline_response.http_response if response.status_code not in [204, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) deserialized = None @@ -1637,11 +1492,7 @@ async def analytics_dataset_document_id_publish_post( # pylint: disable=name-to return deserialized # type: ignore @distributed_trace_async - async def check_consent_document_id_get( - self, - collaboration_id: str, - document_id: str, - **kwargs: Any) -> JSON: + async def consent_document_id_get(self, collaboration_id: str, document_id: str, **kwargs: Any) -> JSON: """Check execution consent by ID of the Query or the Dataset. Check execution consent by ID of the Query or the Dataset. 
@@ -1687,7 +1538,7 @@ async def check_consent_document_id_get( cls: ClsType[JSON] = kwargs.pop("cls", None) - _request = build_collaboration_check_consent_document_id_get_request( + _request = build_collaboration_consent_document_id_get_request( collaboration_id=collaboration_id, document_id=document_id, api_version=self._config.api_version, @@ -1704,10 +1555,7 @@ async def check_consent_document_id_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -1716,17 +1564,12 @@ async def check_consent_document_id_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @overload - async def set_consent_document_id_put( + async def consent_document_id_put( self, collaboration_id: str, document_id: str, @@ -1734,7 +1577,7 @@ async def set_consent_document_id_put( *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Set execution consent (enable / disable) by ID of the Query or the Dataset. Set execution consent (enable / disable) by ID of the Query or the Dataset. @@ -1748,8 +1591,8 @@ async def set_consent_document_id_put( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -1771,7 +1614,7 @@ async def set_consent_document_id_put( """ @overload - async def set_consent_document_id_put( + async def consent_document_id_put( self, collaboration_id: str, document_id: str, @@ -1779,7 +1622,7 @@ async def set_consent_document_id_put( *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Set execution consent (enable / disable) by ID of the Query or the Dataset. Set execution consent (enable / disable) by ID of the Query or the Dataset. @@ -1793,8 +1636,8 @@ async def set_consent_document_id_put( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -1811,13 +1654,9 @@ async def set_consent_document_id_put( """ @distributed_trace_async - async def set_consent_document_id_put(self, - collaboration_id: str, - document_id: str, - body: Union[JSON, - IO[bytes]], - **kwargs: Any) -> Union[Any, - JSON]: + async def consent_document_id_put( + self, collaboration_id: str, document_id: str, body: Union[JSON, IO[bytes]], **kwargs: Any + ) -> Optional[JSON]: """Set execution consent (enable / disable) by ID of the Query or the Dataset. Set execution consent (enable / disable) by ID of the Query or the Dataset. @@ -1828,8 +1667,8 @@ async def set_consent_document_id_put(self, :type document_id: str :param body: Is either a JSON type or a IO[bytes] type. Required. 
:type body: JSON or IO[bytes] - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -1860,9 +1699,8 @@ async def set_consent_document_id_put(self, _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Union[Any, JSON]] = kwargs.pop("cls", None) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -1872,7 +1710,7 @@ async def set_consent_document_id_put(self, else: _json = body - _request = build_collaboration_set_consent_document_id_put_request( + _request = build_collaboration_consent_document_id_put_request( collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, @@ -1891,23 +1729,21 @@ async def set_consent_document_id_put(self, response = pipeline_response.http_response - if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + if response.status_code not in [204, 422]: + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if response.content: - deserialized = response.json() - else: - deserialized = None + deserialized = None + if response.status_code == 422: + if response.content: + deserialized = response.json() + else: + deserialized = None if cls: - return cls(pipeline_response, cast( - Union[Any, JSON], deserialized), {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore - return cast(Union[Any, JSON], deserialized) # type: ignore + return deserialized # type: ignore @overload async def 
analytics_queries_document_id_publish_post( # pylint: disable=name-too-long @@ -2058,8 +1894,7 @@ async def analytics_queries_document_id_publish_post( # pylint: disable=name-to _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) content_type = content_type or "application/json" @@ -2090,10 +1925,7 @@ async def analytics_queries_document_id_publish_post( # pylint: disable=name-to response = pipeline_response.http_response if response.status_code not in [204, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) deserialized = None @@ -2109,10 +1941,7 @@ async def analytics_queries_document_id_publish_post( # pylint: disable=name-to return deserialized # type: ignore @distributed_trace_async - async def analytics_queries_list_get( - self, - collaboration_id: str, - **kwargs: Any) -> JSON: + async def analytics_queries_list_get(self, collaboration_id: str, **kwargs: Any) -> JSON: """List all queries. List all queries. 
@@ -2175,10 +2004,7 @@ async def analytics_queries_list_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -2187,21 +2013,12 @@ async def analytics_queries_list_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def analytics_queries_document_id_get( - self, - collaboration_id: str, - document_id: str, - **kwargs: Any) -> JSON: + async def analytics_queries_document_id_get(self, collaboration_id: str, document_id: str, **kwargs: Any) -> JSON: """Get query by id. Get query by id. @@ -2282,10 +2099,7 @@ async def analytics_queries_document_id_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -2294,12 +2108,7 @@ async def analytics_queries_document_id_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @@ -2436,8 +2245,7 @@ async def analytics_queries_document_id_vote_post( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) + content_type: 
Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) content_type = content_type if body else None cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) @@ -2472,10 +2280,7 @@ async def analytics_queries_document_id_vote_post( response = pipeline_response.http_response if response.status_code not in [204, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) deserialized = None @@ -2710,8 +2515,7 @@ async def analytics_queries_document_id_run_post( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[JSON] = kwargs.pop("cls", None) content_type = content_type or "application/json" @@ -2742,10 +2546,7 @@ async def analytics_queries_document_id_run_post( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -2754,29 +2555,20 @@ async def analytics_queries_document_id_run_post( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def analytics_queries_jobid_get( - self, - collaboration_id: str, - jobid: str, - **kwargs: Any) -> JSON: + async def analytics_runs_job_id_get(self, collaboration_id: str, job_id: str, **kwargs: Any) 
-> JSON: """Get query run result by job id. Get query run result by job id. :param collaboration_id: Required. :type collaboration_id: str - :param jobid: Required. - :type jobid: str + :param job_id: Required. + :type job_id: str :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -2827,9 +2619,9 @@ async def analytics_queries_jobid_get( cls: ClsType[JSON] = kwargs.pop("cls", None) - _request = build_collaboration_analytics_queries_jobid_get_request( + _request = build_collaboration_analytics_runs_job_id_get_request( collaboration_id=collaboration_id, - jobid=jobid, + job_id=job_id, api_version=self._config.api_version, headers=_headers, params=_params, @@ -2844,10 +2636,7 @@ async def analytics_queries_jobid_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -2856,19 +2645,14 @@ async def analytics_queries_jobid_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace_async - async def analytics_queries_document_id_runhistory_get( # pylint: disable=name-too-long + async def analytics_queries_document_id_runs_get( self, collaboration_id: str, document_id: str, **kwargs: Any - ) -> Union[List[JSON], JSON]: + ) -> Union[list[JSON], JSON]: """Get query run history by query id. Get query run history by query id. 
@@ -2951,9 +2735,9 @@ async def analytics_queries_document_id_runhistory_get( # pylint: disable=name- _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[Union[List[JSON], JSON]] = kwargs.pop("cls", None) + cls: ClsType[Union[list[JSON], JSON]] = kwargs.pop("cls", None) - _request = build_collaboration_analytics_queries_document_id_runhistory_get_request( + _request = build_collaboration_analytics_queries_document_id_runs_get_request( collaboration_id=collaboration_id, document_id=document_id, api_version=self._config.api_version, @@ -2970,10 +2754,7 @@ async def analytics_queries_document_id_runhistory_get( # pylint: disable=name- response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -2982,15 +2763,14 @@ async def analytics_queries_document_id_runhistory_get( # pylint: disable=name- deserialized = None if cls: - return cls(pipeline_response, cast( - Union[List[JSON], JSON], deserialized), {}) # type: ignore + return cls(pipeline_response, cast(Union[list[JSON], JSON], deserialized), {}) # type: ignore - return cast(Union[List[JSON], JSON], deserialized) # type: ignore + return cast(Union[list[JSON], JSON], deserialized) # type: ignore @distributed_trace_async async def analytics_datasets_document_id_queries_get( # pylint: disable=name-too-long self, collaboration_id: str, document_id: str, **kwargs: Any - ) -> Union[List[str], JSON]: + ) -> Union[list[str], JSON]: """Get queries by dataset id. Get queries by dataset id. 
@@ -3030,7 +2810,7 @@ async def analytics_datasets_document_id_queries_get( # pylint: disable=name-to _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[Union[List[str], JSON]] = kwargs.pop("cls", None) + cls: ClsType[Union[list[str], JSON]] = kwargs.pop("cls", None) _request = build_collaboration_analytics_datasets_document_id_queries_get_request( collaboration_id=collaboration_id, @@ -3049,10 +2829,7 @@ async def analytics_datasets_document_id_queries_get( # pylint: disable=name-to response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -3061,10 +2838,9 @@ async def analytics_datasets_document_id_queries_get( # pylint: disable=name-to deserialized = None if cls: - return cls(pipeline_response, cast( - Union[List[str], JSON], deserialized), {}) # type: ignore + return cls(pipeline_response, cast(Union[list[str], JSON], deserialized), {}) # type: ignore - return cast(Union[List[str], JSON], deserialized) # type: ignore + return cast(Union[list[str], JSON], deserialized) # type: ignore @overload async def analytics_secrets_secret_name_put( @@ -3160,12 +2936,9 @@ async def analytics_secrets_secret_name_put( """ @distributed_trace_async - async def analytics_secrets_secret_name_put(self, - collaboration_id: str, - secret_name: str, - body: Optional[Union[JSON, - IO[bytes]]] = None, - **kwargs: Any) -> JSON: + async def analytics_secrets_secret_name_put( + self, collaboration_id: str, secret_name: str, body: Optional[Union[JSON, IO[bytes]]] = None, **kwargs: Any + ) -> JSON: """Set secret for analytics workload. Set secret for analytics workload. 
@@ -3212,8 +2985,7 @@ async def analytics_secrets_secret_name_put(self, _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) @@ -3248,10 +3020,7 @@ async def analytics_secrets_secret_name_put(self, response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -3260,12 +3029,7 @@ async def analytics_secrets_secret_name_put(self, deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @@ -3355,10 +3119,7 @@ async def analytics_auditevents_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -3367,11 +3128,6 @@ async def analytics_auditevents_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_patch.py 
b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_patch.py index 2bd950a309c..87676c65a8f 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_patch.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/aio/operations/_patch.py @@ -9,8 +9,7 @@ """ -# Add all objects you want publicly available to users at this package level -__all__: list[str] = [] +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/__init__.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/__init__.py index 91ecd54c4e2..5a4c57889d9 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/__init__.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/__init__.py @@ -3,16 +3,18 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. 
# -------------------------------------------------------------------------- +# flake8: noqa: F403 # pylint: disable=wrong-import-position from typing import TYPE_CHECKING if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import # noqa: F403 + from ._patch import * # pylint: disable=unused-wildcard-import from ._operations import CollaborationOperations # type: ignore from ._patch import __all__ as _patch_all +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_operations.py b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_operations.py index b8ea95d3671..61807e6a0e7 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_operations.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_operations.py @@ -27,45 +27,31 @@ JSON = MutableMapping[str, Any] T = TypeVar("T") -ClsType = Optional[Callable[[ - PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] -List = list +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -def build_collaboration_list_request( - *, - active_only: bool = False, - **kwargs: Any) -> HttpRequest: +def build_collaboration_list_get_request(*, active_only: bool = False, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL _url = "/collaborations" # Construct parameters - _params["api-version"] = 
_SERIALIZER.query( - "api_version", api_version, "str") if active_only is not None: - _params["activeOnly"] = _SERIALIZER.query( - "active_only", active_only, "bool") + _params["activeOnly"] = _SERIALIZER.query("active_only", active_only, "bool") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_id_get_request( @@ -74,69 +60,50 @@ def build_collaboration_id_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}" + _url = "/collaborations/{collaborationId}" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") if active_only is not None: - _params["activeOnly"] = _SERIALIZER.query( - "active_only", active_only, "bool") + _params["activeOnly"] = _SERIALIZER.query("active_only", active_only, "bool") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - 
headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_collaboration_report_get_request( - collaboration_id: str, - **kwargs: Any) -> HttpRequest: +def build_collaboration_report_get_request(collaboration_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/report" + _url = "/collaborations/{collaborationId}/report" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_analytics_get_request( # pylint: disable=name-too-long @@ -145,32 +112,24 @@ def build_collaboration_analytics_get_request( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics" + _url = "/collaborations/{collaborationId}/analytics" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_analytics_cleanroompolicy_get_request( # pylint: disable=name-too-long @@ -179,32 +138,24 @@ def build_collaboration_analytics_cleanroompolicy_get_request( # pylint: disabl _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/cleanroompolicy" + _url = "/collaborations/{collaborationId}/analytics/cleanroompolicy" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct 
parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_oidc_issuer_info_get_request( # pylint: disable=name-too-long @@ -213,32 +164,24 @@ def build_collaboration_oidc_issuer_info_get_request( # pylint: disable=name-to _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/oidc/issuerInfo" + _url = "/collaborations/{collaborationId}/oidc/issuerInfo" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_oidc_set_issuer_url_post_request( # pylint: disable=name-too-long @@ -247,38 +190,27 @@ def 
build_collaboration_oidc_set_issuer_url_post_request( # pylint: disable=nam _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/oidc/setIssuerUrl" + _url = "/collaborations/{collaborationId}/oidc/setIssuerUrl" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_oidc_keys_get_request( # pylint: disable=name-too-long @@ -287,32 +219,24 @@ def build_collaboration_oidc_keys_get_request( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/oidc/keys" + _url = "/collaborations/{collaborationId}/oidc/keys" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_invitations_get_request( # pylint: disable=name-too-long @@ -321,35 +245,26 @@ def build_collaboration_invitations_get_request( # pylint: disable=name-too-lon _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/invitations" + _url = "/collaborations/{collaborationId}/invitations" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), } + "collaborationId": 
_SERIALIZER.url("collaboration_id", collaboration_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") if pending_only is not None: - _params["pendingOnly"] = _SERIALIZER.query( - "pending_only", pending_only, "bool") + _params["pendingOnly"] = _SERIALIZER.query("pending_only", pending_only, "bool") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_invitation_id_get_request( # pylint: disable=name-too-long @@ -358,39 +273,25 @@ def build_collaboration_invitation_id_get_request( # pylint: disable=name-too-l _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/invitations/{invitation_id}" + _url = "/collaborations/{collaborationId}/invitations/{invitationId}" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", - collaboration_id, - "str"), - "invitation_id": _SERIALIZER.url( - "invitation_id", - invitation_id, - "str"), + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + "invitationId": _SERIALIZER.url("invitation_id", invitation_id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = 
_SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_invitation_id_accept_post_request( # pylint: disable=name-too-long @@ -399,39 +300,25 @@ def build_collaboration_invitation_id_accept_post_request( # pylint: disable=na _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/invitations/{invitation_id}/accept" + _url = "/collaborations/{collaborationId}/invitations/{invitationId}/accept" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", - collaboration_id, - "str"), - "invitation_id": _SERIALIZER.url( - "invitation_id", - invitation_id, - "str"), + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + "invitationId": _SERIALIZER.url("invitation_id", invitation_id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="POST", 
url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_analytics_datasets_list_get_request( # pylint: disable=name-too-long @@ -440,184 +327,138 @@ def build_collaboration_analytics_datasets_list_get_request( # pylint: disable= _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/datasets" + _url = "/collaborations/{collaborationId}/analytics/datasets" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_collaboration_analytics_dataset_document_id_get_request( # pylint: disable=name-too-long +def build_collaboration_analytics_datasets_document_id_get_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", 
"2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/datasets/{document_id}" + _url = "/collaborations/{collaborationId}/analytics/datasets/{documentId}" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( - "document_id", document_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + "documentId": _SERIALIZER.url("document_id", document_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_collaboration_analytics_dataset_document_id_publish_post_request( # pylint: disable=name-too-long +def build_collaboration_analytics_datasets_document_id_publish_post_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/datasets/{document_id}/publish" + _url = "/collaborations/{collaborationId}/analytics/datasets/{documentId}/publish" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( - "document_id", document_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + "documentId": _SERIALIZER.url("document_id", document_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_collaboration_check_consent_document_id_get_request( # pylint: disable=name-too-long +def build_collaboration_consent_document_id_get_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") 
# Construct URL - _url = "/collaborations/{collaboration_id}/consent/{document_id}" + _url = "/collaborations/{collaborationId}/consent/{documentId}" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( - "document_id", document_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + "documentId": _SERIALIZER.url("document_id", document_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_collaboration_set_consent_document_id_put_request( # pylint: disable=name-too-long +def build_collaboration_consent_document_id_put_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/consent/{document_id}" + _url = "/collaborations/{collaborationId}/consent/{documentId}" 
path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( - "document_id", document_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + "documentId": _SERIALIZER.url("document_id", document_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_analytics_queries_document_id_publish_post_request( # pylint: disable=name-too-long @@ -626,39 +467,28 @@ def build_collaboration_analytics_queries_document_id_publish_post_request( # p _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/queries/{document_id}/publish" + _url = 
"/collaborations/{collaborationId}/analytics/queries/{documentId}/publish" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( - "document_id", document_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + "documentId": _SERIALIZER.url("document_id", document_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_analytics_queries_list_get_request( # pylint: disable=name-too-long @@ -667,32 +497,24 @@ def build_collaboration_analytics_queries_list_get_request( # pylint: disable=n _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/queries" + _url = "/collaborations/{collaborationId}/analytics/queries" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), } + "collaborationId": 
_SERIALIZER.url("collaboration_id", collaboration_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_analytics_queries_document_id_get_request( # pylint: disable=name-too-long @@ -701,33 +523,25 @@ def build_collaboration_analytics_queries_document_id_get_request( # pylint: di _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/queries/{document_id}" + _url = "/collaborations/{collaborationId}/analytics/queries/{documentId}" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( - "document_id", document_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + "documentId": _SERIALIZER.url("document_id", document_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = 
_SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_analytics_queries_document_id_vote_post_request( # pylint: disable=name-too-long @@ -736,39 +550,28 @@ def build_collaboration_analytics_queries_document_id_vote_post_request( # pyli _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/queries/{document_id}/vote" + _url = "/collaborations/{collaborationId}/analytics/queries/{documentId}/vote" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( - "document_id", document_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + "documentId": _SERIALIZER.url("document_id", document_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", 
content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_analytics_queries_document_id_run_post_request( # pylint: disable=name-too-long @@ -777,109 +580,82 @@ def build_collaboration_analytics_queries_document_id_run_post_request( # pylin _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/queries/{document_id}/run" + _url = "/collaborations/{collaborationId}/analytics/queries/{documentId}/run" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( - "document_id", document_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + "documentId": _SERIALIZER.url("document_id", document_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") + _headers["Content-Type"] = 
_SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_collaboration_analytics_queries_jobid_get_request( # pylint: disable=name-too-long - collaboration_id: str, jobid: str, **kwargs: Any +def build_collaboration_analytics_runs_job_id_get_request( # pylint: disable=name-too-long + collaboration_id: str, job_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/runs/{jobid}" + _url = "/collaborations/{collaborationId}/analytics/runs/{jobId}" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), "jobid": _SERIALIZER.url( - "jobid", jobid, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def 
build_collaboration_analytics_queries_document_id_runhistory_get_request( # pylint: disable=name-too-long +def build_collaboration_analytics_queries_document_id_runs_get_request( # pylint: disable=name-too-long collaboration_id: str, document_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/queries/{document_id}/runs" + _url = "/collaborations/{collaborationId}/analytics/queries/{documentId}/runs" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( - "document_id", document_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + "documentId": _SERIALIZER.url("document_id", document_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_analytics_datasets_document_id_queries_get_request( # pylint: disable=name-too-long @@ -888,33 +664,25 @@ def build_collaboration_analytics_datasets_document_id_queries_get_request( # p _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/datasets/{document_id}/queries" + _url = "/collaborations/{collaborationId}/analytics/datasets/{documentId}/queries" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), "document_id": _SERIALIZER.url( - "document_id", document_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + "documentId": _SERIALIZER.url("document_id", document_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_analytics_secrets_secret_name_put_request( # pylint: disable=name-too-long @@ -923,39 +691,28 @@ def build_collaboration_analytics_secrets_secret_name_put_request( # pylint: di _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop( - "Content-Type", None)) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + 
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/collaborations/{collaboration_id}/analytics/secrets/{secret_name}" + _url = "/collaborations/{collaborationId}/analytics/secrets/{secretName}" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), "secret_name": _SERIALIZER.url( - "secret_name", secret_name, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + "secretName": _SERIALIZER.url("secret_name", secret_name, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header( - "content_type", content_type, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_collaboration_analytics_auditevents_get_request( # pylint: disable=name-too-long @@ -969,39 +726,30 @@ def build_collaboration_analytics_auditevents_get_request( # pylint: disable=na _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop( - "api_version", _params.pop( - "api-version", "2026-03-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-03-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = 
"/collaborations/{collaboration_id}/analytics/auditevents" + _url = "/collaborations/{collaborationId}/analytics/auditevents" path_format_arguments = { - "collaboration_id": _SERIALIZER.url( - "collaboration_id", collaboration_id, "str"), } + "collaborationId": _SERIALIZER.url("collaboration_id", collaboration_id, "str"), + } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _params["api-version"] = _SERIALIZER.query( - "api_version", api_version, "str") if scope is not None: _params["scope"] = _SERIALIZER.query("scope", scope, "str") if from_seqno is not None: - _params["from_seqno"] = _SERIALIZER.query( - "from_seqno", from_seqno, "str") + _params["from_seqno"] = _SERIALIZER.query("from_seqno", from_seqno, "str") if to_seqno is not None: _params["to_seqno"] = _SERIALIZER.query("to_seqno", to_seqno, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_params, - headers=_headers, - **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) class CollaborationOperations: # pylint: disable=too-many-public-methods @@ -1016,17 +764,13 @@ class CollaborationOperations: # pylint: disable=too-many-public-methods def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client: PipelineClient = input_args.pop( - 0) if input_args else kwargs.pop("client") - self._config: AnalyticsFrontendAPIConfiguration = input_args.pop( - 0) if input_args else kwargs.pop("config") - self._serialize: Serializer = input_args.pop( - 0) if input_args else kwargs.pop("serializer") - self._deserialize: Deserializer = input_args.pop( - 0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AnalyticsFrontendAPIConfiguration = 
input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list(self, *, active_only: bool = False, **kwargs: Any) -> List[JSON]: + def list_get(self, *, active_only: bool = False, **kwargs: Any) -> list[JSON]: """List all collaborations. List all collaborations. @@ -1061,9 +805,9 @@ def list(self, *, active_only: bool = False, **kwargs: Any) -> List[JSON]: _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[JSON]] = kwargs.pop("cls", None) + cls: ClsType[list[JSON]] = kwargs.pop("cls", None) - _request = build_collaboration_list_request( + _request = build_collaboration_list_get_request( active_only=active_only, api_version=self._config.api_version, headers=_headers, @@ -1079,10 +823,7 @@ def list(self, *, active_only: bool = False, **kwargs: Any) -> List[JSON]: response = pipeline_response.http_response if response.status_code not in [200]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -1091,22 +832,12 @@ def list(self, *, active_only: bool = False, **kwargs: Any) -> List[JSON]: deserialized = None if cls: - return cls( - pipeline_response, - cast( - List[JSON], - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(list[JSON], deserialized), {}) # type: ignore - return cast(List[JSON], deserialized) # type: ignore + return cast(list[JSON], deserialized) # type: ignore @distributed_trace - def id_get( - self, - collaboration_id: str, - *, - active_only: bool = False, - **kwargs: Any) -> JSON: + def id_get(self, collaboration_id: str, *, active_only: bool = False, **kwargs: Any) -> 
JSON: """Get collaboration by id. Get collaboration by id. @@ -1168,10 +899,7 @@ def id_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -1180,12 +908,7 @@ def id_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @@ -1278,10 +1001,7 @@ def report_get(self, collaboration_id: str, **kwargs: Any) -> JSON: response = pipeline_response.http_response if response.status_code not in [200, 400, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -1290,12 +1010,7 @@ def report_get(self, collaboration_id: str, **kwargs: Any) -> JSON: deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @@ -1359,10 +1074,7 @@ def analytics_get(self, collaboration_id: str, **kwargs: Any) -> JSON: response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -1371,20 +1083,12 @@ def analytics_get(self, collaboration_id: str, **kwargs: Any) -> JSON: deserialized = None if cls: 
- return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace - def analytics_cleanroompolicy_get( - self, - collaboration_id: str, - **kwargs: Any) -> JSON: + def analytics_cleanroompolicy_get(self, collaboration_id: str, **kwargs: Any) -> JSON: """Get collaboration analytics cleanroompolicy. Get collaboration analytics cleanroompolicy. @@ -1447,10 +1151,7 @@ def analytics_cleanroompolicy_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -1459,23 +1160,15 @@ def analytics_cleanroompolicy_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace - def oidc_issuer_info_get( - self, - collaboration_id: str, - **kwargs: Any) -> JSON: - """Get collaboration oidcissuer. + def oidc_issuer_info_get(self, collaboration_id: str, **kwargs: Any) -> JSON: + """Get collaboration OIDC issuer info. - Get collaboration oidcissuer. + Get collaboration OIDC issuer info. :param collaboration_id: Required. 
:type collaboration_id: str @@ -1533,10 +1226,7 @@ def oidc_issuer_info_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -1545,12 +1235,7 @@ def oidc_issuer_info_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @@ -1644,11 +1329,9 @@ def oidc_set_issuer_url_post( """ @distributed_trace - def oidc_set_issuer_url_post(self, - collaboration_id: str, - body: Optional[Union[JSON, - IO[bytes]]] = None, - **kwargs: Any) -> JSON: + def oidc_set_issuer_url_post( + self, collaboration_id: str, body: Optional[Union[JSON, IO[bytes]]] = None, **kwargs: Any + ) -> JSON: """Set collaboration oidc issuer url. Set collaboration oidc issuer url. 
@@ -1694,8 +1377,7 @@ def oidc_set_issuer_url_post(self, _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) @@ -1729,10 +1411,7 @@ def oidc_set_issuer_url_post(self, response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -1741,12 +1420,7 @@ def oidc_set_issuer_url_post(self, deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @@ -1821,10 +1495,7 @@ def oidc_keys_get(self, collaboration_id: str, **kwargs: Any) -> JSON: response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -1833,22 +1504,12 @@ def oidc_keys_get(self, collaboration_id: str, **kwargs: Any) -> JSON: deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace - def invitations_get( - self, - collaboration_id: str, - *, - pending_only: bool = 
False, - **kwargs: Any) -> JSON: + def invitations_get(self, collaboration_id: str, *, pending_only: bool = False, **kwargs: Any) -> JSON: """List all invitations. List all invitations. @@ -1910,10 +1571,7 @@ def invitations_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -1922,21 +1580,12 @@ def invitations_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace - def invitation_id_get( - self, - collaboration_id: str, - invitation_id: str, - **kwargs: Any) -> JSON: + def invitation_id_get(self, collaboration_id: str, invitation_id: str, **kwargs: Any) -> JSON: """Get invitation by id. Get invitation by id. 
@@ -2003,10 +1652,7 @@ def invitation_id_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -2015,21 +1661,12 @@ def invitation_id_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace - def invitation_id_accept_post( - self, - collaboration_id: str, - invitation_id: str, - **kwargs: Any) -> Optional[JSON]: + def invitation_id_accept_post(self, collaboration_id: str, invitation_id: str, **kwargs: Any) -> Optional[JSON]: """Accept invitation by id. Accept invitation by id. @@ -2084,10 +1721,7 @@ def invitation_id_accept_post( response = pipeline_response.http_response if response.status_code not in [204, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) deserialized = None @@ -2103,10 +1737,7 @@ def invitation_id_accept_post( return deserialized # type: ignore @distributed_trace - def analytics_datasets_list_get( - self, - collaboration_id: str, - **kwargs: Any) -> JSON: + def analytics_datasets_list_get(self, collaboration_id: str, **kwargs: Any) -> JSON: """List all datasets. List all datasets. 
@@ -2169,10 +1800,7 @@ def analytics_datasets_list_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -2181,21 +1809,12 @@ def analytics_datasets_list_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace - def analytics_dataset_document_id_get( - self, - collaboration_id: str, - document_id: str, - **kwargs: Any) -> JSON: + def analytics_datasets_document_id_get(self, collaboration_id: str, document_id: str, **kwargs: Any) -> JSON: """Get dataset by id. Get dataset by id. @@ -2281,7 +1900,7 @@ def analytics_dataset_document_id_get( cls: ClsType[JSON] = kwargs.pop("cls", None) - _request = build_collaboration_analytics_dataset_document_id_get_request( + _request = build_collaboration_analytics_datasets_document_id_get_request( collaboration_id=collaboration_id, document_id=document_id, api_version=self._config.api_version, @@ -2298,10 +1917,7 @@ def analytics_dataset_document_id_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -2310,17 +1926,12 @@ def analytics_dataset_document_id_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, 
deserialized) # type: ignore @overload - def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long + def analytics_datasets_document_id_publish_post( # pylint: disable=name-too-long self, collaboration_id: str, document_id: str, @@ -2403,7 +2014,7 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long """ @overload - def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long + def analytics_datasets_document_id_publish_post( # pylint: disable=name-too-long self, collaboration_id: str, document_id: str, @@ -2443,7 +2054,7 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long """ @distributed_trace - def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long + def analytics_datasets_document_id_publish_post( # pylint: disable=name-too-long self, collaboration_id: str, document_id: str, body: Union[JSON, IO[bytes]], **kwargs: Any ) -> Optional[JSON]: """Publish dataset by id. 
@@ -2526,8 +2137,7 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) content_type = content_type or "application/json" @@ -2538,7 +2148,7 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long else: _json = body - _request = build_collaboration_analytics_dataset_document_id_publish_post_request( + _request = build_collaboration_analytics_datasets_document_id_publish_post_request( collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, @@ -2558,10 +2168,7 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long response = pipeline_response.http_response if response.status_code not in [204, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) deserialized = None @@ -2577,11 +2184,7 @@ def analytics_dataset_document_id_publish_post( # pylint: disable=name-too-long return deserialized # type: ignore @distributed_trace - def check_consent_document_id_get( - self, - collaboration_id: str, - document_id: str, - **kwargs: Any) -> JSON: + def consent_document_id_get(self, collaboration_id: str, document_id: str, **kwargs: Any) -> JSON: """Check execution consent by ID of the Query or the Dataset. Check execution consent by ID of the Query or the Dataset. 
@@ -2627,7 +2230,7 @@ def check_consent_document_id_get( cls: ClsType[JSON] = kwargs.pop("cls", None) - _request = build_collaboration_check_consent_document_id_get_request( + _request = build_collaboration_consent_document_id_get_request( collaboration_id=collaboration_id, document_id=document_id, api_version=self._config.api_version, @@ -2644,10 +2247,7 @@ def check_consent_document_id_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -2656,17 +2256,12 @@ def check_consent_document_id_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @overload - def set_consent_document_id_put( + def consent_document_id_put( self, collaboration_id: str, document_id: str, @@ -2674,7 +2269,7 @@ def set_consent_document_id_put( *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Set execution consent (enable / disable) by ID of the Query or the Dataset. Set execution consent (enable / disable) by ID of the Query or the Dataset. @@ -2688,8 +2283,8 @@ def set_consent_document_id_put( :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
:paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -2711,7 +2306,7 @@ def set_consent_document_id_put( """ @overload - def set_consent_document_id_put( + def consent_document_id_put( self, collaboration_id: str, document_id: str, @@ -2719,7 +2314,7 @@ def set_consent_document_id_put( *, content_type: str = "application/json", **kwargs: Any - ) -> Union[Any, JSON]: + ) -> Optional[JSON]: """Set execution consent (enable / disable) by ID of the Query or the Dataset. Set execution consent (enable / disable) by ID of the Query or the Dataset. @@ -2733,8 +2328,8 @@ def set_consent_document_id_put( :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -2751,13 +2346,9 @@ def set_consent_document_id_put( """ @distributed_trace - def set_consent_document_id_put(self, - collaboration_id: str, - document_id: str, - body: Union[JSON, - IO[bytes]], - **kwargs: Any) -> Union[Any, - JSON]: + def consent_document_id_put( + self, collaboration_id: str, document_id: str, body: Union[JSON, IO[bytes]], **kwargs: Any + ) -> Optional[JSON]: """Set execution consent (enable / disable) by ID of the Query or the Dataset. Set execution consent (enable / disable) by ID of the Query or the Dataset. @@ -2768,8 +2359,8 @@ def set_consent_document_id_put(self, :type document_id: str :param body: Is either a JSON type or a IO[bytes] type. Required. 
:type body: JSON or IO[bytes] - :return: any or JSON object - :rtype: any or JSON + :return: JSON object or None + :rtype: JSON or None :raises ~azure.core.exceptions.HttpResponseError: Example: @@ -2800,9 +2391,8 @@ def set_consent_document_id_put(self, _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Union[Any, JSON]] = kwargs.pop("cls", None) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -2812,7 +2402,7 @@ def set_consent_document_id_put(self, else: _json = body - _request = build_collaboration_set_consent_document_id_put_request( + _request = build_collaboration_consent_document_id_put_request( collaboration_id=collaboration_id, document_id=document_id, content_type=content_type, @@ -2831,23 +2421,21 @@ def set_consent_document_id_put(self, response = pipeline_response.http_response - if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + if response.status_code not in [204, 422]: + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - if response.content: - deserialized = response.json() - else: - deserialized = None + deserialized = None + if response.status_code == 422: + if response.content: + deserialized = response.json() + else: + deserialized = None if cls: - return cls(pipeline_response, cast( - Union[Any, JSON], deserialized), {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore - return cast(Union[Any, JSON], deserialized) # type: ignore + return deserialized # type: ignore @overload def 
analytics_queries_document_id_publish_post( # pylint: disable=name-too-long @@ -2998,8 +2586,7 @@ def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) content_type = content_type or "application/json" @@ -3030,10 +2617,7 @@ def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long response = pipeline_response.http_response if response.status_code not in [204, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) deserialized = None @@ -3049,10 +2633,7 @@ def analytics_queries_document_id_publish_post( # pylint: disable=name-too-long return deserialized # type: ignore @distributed_trace - def analytics_queries_list_get( - self, - collaboration_id: str, - **kwargs: Any) -> JSON: + def analytics_queries_list_get(self, collaboration_id: str, **kwargs: Any) -> JSON: """List all queries. List all queries. 
@@ -3115,10 +2696,7 @@ def analytics_queries_list_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -3127,21 +2705,12 @@ def analytics_queries_list_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace - def analytics_queries_document_id_get( - self, - collaboration_id: str, - document_id: str, - **kwargs: Any) -> JSON: + def analytics_queries_document_id_get(self, collaboration_id: str, document_id: str, **kwargs: Any) -> JSON: """Get query by id. Get query by id. @@ -3222,10 +2791,7 @@ def analytics_queries_document_id_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -3234,12 +2800,7 @@ def analytics_queries_document_id_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @@ -3330,12 +2891,9 @@ def analytics_queries_document_id_vote_post( """ @distributed_trace - def analytics_queries_document_id_vote_post(self, - collaboration_id: str, - document_id: str, - body: Optional[Union[JSON, - IO[bytes]]] = None, - **kwargs: Any) -> Optional[JSON]: + def analytics_queries_document_id_vote_post( + self, 
collaboration_id: str, document_id: str, body: Optional[Union[JSON, IO[bytes]]] = None, **kwargs: Any + ) -> Optional[JSON]: """Vote on query by id. Vote on query by id. @@ -3379,8 +2937,7 @@ def analytics_queries_document_id_vote_post(self, _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) content_type = content_type if body else None cls: ClsType[Optional[JSON]] = kwargs.pop("cls", None) @@ -3415,10 +2972,7 @@ def analytics_queries_document_id_vote_post(self, response = pipeline_response.http_response if response.status_code not in [204, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) deserialized = None @@ -3653,8 +3207,7 @@ def analytics_queries_document_id_run_post( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[JSON] = kwargs.pop("cls", None) content_type = content_type or "application/json" @@ -3685,10 +3238,7 @@ def analytics_queries_document_id_run_post( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -3697,29 +3247,20 @@ def analytics_queries_document_id_run_post( deserialized = None if cls: - 
return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace - def analytics_queries_jobid_get( - self, - collaboration_id: str, - jobid: str, - **kwargs: Any) -> JSON: + def analytics_runs_job_id_get(self, collaboration_id: str, job_id: str, **kwargs: Any) -> JSON: """Get query run result by job id. Get query run result by job id. :param collaboration_id: Required. :type collaboration_id: str - :param jobid: Required. - :type jobid: str + :param job_id: Required. + :type job_id: str :return: JSON object :rtype: JSON :raises ~azure.core.exceptions.HttpResponseError: @@ -3770,9 +3311,9 @@ def analytics_queries_jobid_get( cls: ClsType[JSON] = kwargs.pop("cls", None) - _request = build_collaboration_analytics_queries_jobid_get_request( + _request = build_collaboration_analytics_runs_job_id_get_request( collaboration_id=collaboration_id, - jobid=jobid, + job_id=job_id, api_version=self._config.api_version, headers=_headers, params=_params, @@ -3787,10 +3328,7 @@ def analytics_queries_jobid_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -3799,19 +3337,14 @@ def analytics_queries_jobid_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @distributed_trace - def analytics_queries_document_id_runhistory_get( # pylint: disable=name-too-long + def analytics_queries_document_id_runs_get( self, collaboration_id: str, document_id: str, 
**kwargs: Any - ) -> Union[List[JSON], JSON]: + ) -> Union[list[JSON], JSON]: """Get query run history by query id. Get query run history by query id. @@ -3894,9 +3427,9 @@ def analytics_queries_document_id_runhistory_get( # pylint: disable=name-too-lo _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[Union[List[JSON], JSON]] = kwargs.pop("cls", None) + cls: ClsType[Union[list[JSON], JSON]] = kwargs.pop("cls", None) - _request = build_collaboration_analytics_queries_document_id_runhistory_get_request( + _request = build_collaboration_analytics_queries_document_id_runs_get_request( collaboration_id=collaboration_id, document_id=document_id, api_version=self._config.api_version, @@ -3913,10 +3446,7 @@ def analytics_queries_document_id_runhistory_get( # pylint: disable=name-too-lo response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -3925,15 +3455,14 @@ def analytics_queries_document_id_runhistory_get( # pylint: disable=name-too-lo deserialized = None if cls: - return cls(pipeline_response, cast( - Union[List[JSON], JSON], deserialized), {}) # type: ignore + return cls(pipeline_response, cast(Union[list[JSON], JSON], deserialized), {}) # type: ignore - return cast(Union[List[JSON], JSON], deserialized) # type: ignore + return cast(Union[list[JSON], JSON], deserialized) # type: ignore @distributed_trace def analytics_datasets_document_id_queries_get( # pylint: disable=name-too-long self, collaboration_id: str, document_id: str, **kwargs: Any - ) -> Union[List[str], JSON]: + ) -> Union[list[str], JSON]: """Get queries by dataset id. Get queries by dataset id. 
@@ -3973,7 +3502,7 @@ def analytics_datasets_document_id_queries_get( # pylint: disable=name-too-long _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[Union[List[str], JSON]] = kwargs.pop("cls", None) + cls: ClsType[Union[list[str], JSON]] = kwargs.pop("cls", None) _request = build_collaboration_analytics_datasets_document_id_queries_get_request( collaboration_id=collaboration_id, @@ -3992,10 +3521,7 @@ def analytics_datasets_document_id_queries_get( # pylint: disable=name-too-long response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -4004,10 +3530,9 @@ def analytics_datasets_document_id_queries_get( # pylint: disable=name-too-long deserialized = None if cls: - return cls(pipeline_response, cast( - Union[List[str], JSON], deserialized), {}) # type: ignore + return cls(pipeline_response, cast(Union[list[str], JSON], deserialized), {}) # type: ignore - return cast(Union[List[str], JSON], deserialized) # type: ignore + return cast(Union[list[str], JSON], deserialized) # type: ignore @overload def analytics_secrets_secret_name_put( @@ -4103,12 +3628,9 @@ def analytics_secrets_secret_name_put( """ @distributed_trace - def analytics_secrets_secret_name_put(self, - collaboration_id: str, - secret_name: str, - body: Optional[Union[JSON, - IO[bytes]]] = None, - **kwargs: Any) -> JSON: + def analytics_secrets_secret_name_put( + self, collaboration_id: str, secret_name: str, body: Optional[Union[JSON, IO[bytes]]] = None, **kwargs: Any + ) -> JSON: """Set secret for analytics workload. Set secret for analytics workload. 
@@ -4155,8 +3677,7 @@ def analytics_secrets_secret_name_put(self, _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop( - "content_type", _headers.pop("Content-Type", None)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) content_type = content_type if body else None cls: ClsType[JSON] = kwargs.pop("cls", None) @@ -4191,10 +3712,7 @@ def analytics_secrets_secret_name_put(self, response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -4203,12 +3721,7 @@ def analytics_secrets_secret_name_put(self, deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore @@ -4298,10 +3811,7 @@ def analytics_auditevents_get( response = pipeline_response.http_response if response.status_code not in [200, 422]: - map_error( - status_code=response.status_code, - response=response, - error_map=error_map) + map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: @@ -4310,11 +3820,6 @@ def analytics_auditevents_get( deserialized = None if cls: - return cls( - pipeline_response, - cast( - JSON, - deserialized), - {}) # type: ignore + return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore return cast(JSON, deserialized) # type: ignore diff --git a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_patch.py 
b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_patch.py index 2bd950a309c..87676c65a8f 100644 --- a/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_patch.py +++ b/src/managedcleanroom/azext_managedcleanroom/analytics_frontend_api/operations/_patch.py @@ -9,8 +9,7 @@ """ -# Add all objects you want publicly available to users at this package level -__all__: list[str] = [] +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_collaboration.py b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_collaboration.py index 4aeb88327a7..b42e5653640 100644 --- a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_collaboration.py +++ b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_collaboration.py @@ -38,7 +38,7 @@ def test_list_collaborations_success(self, mock_get_client): """Test listing collaborations returns correct data""" # Mock the client and its methods mock_client = Mock() - mock_client.collaboration.list.return_value = MOCK_COLLABORATION_LIST + mock_client.collaboration.list_get.return_value = MOCK_COLLABORATION_LIST mock_get_client.return_value = mock_client # Execute @@ -48,14 +48,14 @@ def test_list_collaborations_success(self, mock_get_client): self.assertEqual(len(result), 2) self.assertEqual(result[0]["collaborationId"], "collab-1") self.assertEqual(result[1]["collaborationId"], "collab-2") - mock_client.collaboration.list.assert_called_once() + mock_client.collaboration.list_get.assert_called_once() @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') def test_list_collaborations_empty(self, mock_get_client): """Test listing collaborations with no results""" # Mock the client mock_client = Mock() - mock_client.collaboration.list.return_value = [] + 
mock_client.collaboration.list_get.return_value = [] mock_get_client.return_value = mock_client # Execute @@ -293,7 +293,7 @@ def test_list_collaborations_with_active_only_filter( """Test listing collaborations with active_only filter""" # Mock the client mock_client = Mock() - mock_client.collaboration.list.return_value = [ + mock_client.collaboration.list_get.return_value = [ {"collaborationId": "collab-1", "name": "Active Collab 1", "status": "active"} ] mock_get_client.return_value = mock_client @@ -307,7 +307,7 @@ def test_list_collaborations_with_active_only_filter( # Verify self.assertEqual(len(result), 1) self.assertEqual(result[0]["status"], "active") - mock_client.collaboration.list.assert_called_once_with( + mock_client.collaboration.list_get.assert_called_once_with( active_only=True) @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') diff --git a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_dataset.py b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_dataset.py index d2c251cb5fa..36c3069f9af 100644 --- a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_dataset.py +++ b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_dataset.py @@ -54,7 +54,7 @@ def test_show_dataset_success(self, mock_get_client): """Test showing a specific dataset""" # Mock the client mock_client = Mock() - mock_client.collaboration.analytics_dataset_document_id_get.return_value = MOCK_DATASET + mock_client.collaboration.analytics_datasets_document_id_get.return_value = MOCK_DATASET mock_get_client.return_value = mock_client # Execute @@ -68,7 +68,7 @@ def test_show_dataset_success(self, mock_get_client): self.assertEqual(result["datasetId"], "test-dataset-123") self.assertEqual(result["name"], "Customer Data") self.assertEqual(result["status"], "published") - mock_client.collaboration.analytics_dataset_document_id_get.assert_called_once_with( + 
mock_client.collaboration.analytics_datasets_document_id_get.assert_called_once_with( "test-collab-123", "test-dataset-123") # Publish Dataset Tests @@ -83,7 +83,7 @@ def test_publish_dataset_success(self, mock_get_client): "publishedAt": "2024-01-01T00:00:00Z" } mock_client = Mock() - mock_client.collaboration.analytics_dataset_document_id_publish_post.return_value = mock_publish_response + mock_client.collaboration.analytics_datasets_document_id_publish_post.return_value = mock_publish_response mock_get_client.return_value = mock_client # Test body @@ -108,7 +108,7 @@ def test_publish_dataset_success(self, mock_get_client): # Verify self.assertEqual(result["datasetId"], "test-dataset-123") self.assertEqual(result["status"], "published") - mock_client.collaboration.analytics_dataset_document_id_publish_post.assert_called_once_with( + mock_client.collaboration.analytics_datasets_document_id_publish_post.assert_called_once_with( "test-collab-123", "test-dataset-123", test_body) @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') @@ -116,7 +116,7 @@ def test_publish_dataset_failure(self, mock_get_client): """Test handling publish failure (ERROR SCENARIO)""" # Mock error mock_client = Mock() - mock_client.collaboration.analytics_dataset_document_id_publish_post.side_effect = Exception( + mock_client.collaboration.analytics_datasets_document_id_publish_post.side_effect = Exception( "Dataset validation failed") mock_get_client.return_value = mock_client diff --git a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_misc.py b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_misc.py index d70a49548a8..78d4a897666 100644 --- a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_misc.py +++ b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_misc.py @@ -119,7 +119,7 @@ def test_check_consent(self, mock_get_client): """Test checking consent status""" # Mock the client and its 
method chain mock_client = Mock() - mock_client.collaboration.check_consent_document_id_get.return_value = { + mock_client.collaboration.consent_document_id_get.return_value = { "documentId": "doc-123", "consentGiven": True, "consentedAt": "2024-01-01T00:00:00Z" @@ -136,7 +136,7 @@ def test_check_consent(self, mock_get_client): # Verify self.assertEqual(result["documentId"], "doc-123") self.assertTrue(result["consentGiven"]) - mock_client.collaboration.check_consent_document_id_get.assert_called_once_with( + mock_client.collaboration.consent_document_id_get.assert_called_once_with( "test-collab-123", "doc-123") @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') @@ -144,7 +144,7 @@ def test_set_consent(self, mock_get_client): """Test setting consent action""" # Mock the client and its method chain mock_client = Mock() - mock_client.collaboration.set_consent_document_id_put.return_value = { + mock_client.collaboration.consent_document_id_put.return_value = { "documentId": "doc-123", "action": "enable", "updatedAt": "2024-01-01T00:00:00Z"} mock_get_client.return_value = mock_client @@ -158,7 +158,7 @@ def test_set_consent(self, mock_get_client): # Verify self.assertEqual(result["action"], "enable") - mock_client.collaboration.set_consent_document_id_put.assert_called_once_with( + mock_client.collaboration.consent_document_id_put.assert_called_once_with( "test-collab-123", "doc-123", body={"consentAction": "enable"}) # Audit Test From 3a84a088d5fdc15d6d1c43dea99183484eaec7f9 Mon Sep 17 00:00:00 2001 From: Saksham Garg Date: Fri, 20 Mar 2026 17:20:58 +0530 Subject: [PATCH 05/10] parameterize cmdlets --- .../_frontend_custom.py | 286 +++++++++++++++++- .../azext_managedcleanroom/_help.py | 244 +++++++++++---- .../azext_managedcleanroom/_params.py | 39 +++ .../tests/latest/test_frontend_dataset.py | 197 ++++++++++++ .../tests/latest/test_frontend_query.py | 252 +++++++++++++++ 5 files changed, 953 insertions(+), 65 deletions(-) diff --git 
a/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py b/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py index 39bf0ca0c7d..7210f9b1a47 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py +++ b/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py @@ -227,21 +227,169 @@ def frontend_collaboration_dataset_show( def frontend_collaboration_dataset_publish( - cmd, collaboration_id, document_id, body, api_version=None): + cmd, collaboration_id, document_id, + body=None, + storage_account_url=None, + container_name=None, + storage_account_type=None, + encryption_mode=None, + schema_file=None, + schema_format=None, + access_mode=None, + allowed_fields=None, + identity_name=None, + identity_client_id=None, + identity_tenant_id=None, + identity_issuer_url=None, + dek_keyvault_url=None, + dek_secret_id=None, + kek_keyvault_url=None, + kek_secret_id=None, + kek_maa_url=None, + api_version=None): """Publish a dataset :param cmd: CLI command context :param collaboration_id: Collaboration identifier :param document_id: Dataset document identifier - :param body: Publish configuration JSON (string, dict, or @file) + :param body: Publish configuration JSON (string, dict, or @file) - legacy mode + :param storage_account_url: Azure Storage account URL + :param container_name: Blob container name + :param storage_account_type: Storage account type + :param encryption_mode: Encryption mode (SSE or CPK) + :param schema_file: Path to schema file (@path/to/schema.json) + :param schema_format: Schema format (default: Delta) + :param access_mode: Access mode + :param allowed_fields: Comma-separated allowed field names + :param identity_name: Managed identity name + :param identity_client_id: Client ID + :param identity_tenant_id: Tenant ID + :param identity_issuer_url: OIDC issuer URL + :param dek_keyvault_url: DEK Key Vault URL (CPK mode) + :param dek_secret_id: DEK secret ID (CPK mode) + :param kek_keyvault_url: KEK Key Vault 
URL (CPK mode) + :param kek_secret_id: KEK secret ID (CPK mode) + :param kek_maa_url: KEK MAA URL (CPK mode) :param api_version: API version to use for this request :return: Publish result """ import json + from azure.cli.core.util import CLIError - # Handle body parameter - convert string to dict if needed - if isinstance(body, str): - body = json.loads(body) + # Check for mutual exclusion: body vs parameters + has_params = any([ + storage_account_url, container_name, storage_account_type, encryption_mode, + schema_file, access_mode, identity_name, identity_client_id, + identity_tenant_id, identity_issuer_url + ]) + + if body and has_params: + raise CLIError('Cannot use --body together with individual parameters. Use either --body or the parameter flags.') + + # Legacy mode: use body directly + if body: + if isinstance(body, str): + body = json.loads(body) + client = get_frontend_client(cmd, api_version=api_version) + return client.collaboration.analytics_datasets_document_id_publish_post( + collaboration_id, document_id, body) + + # Parameter mode: construct body from parameters + if not has_params: + raise CLIError('Either --body or individual parameters (--storage-account-url, --container-name, etc.) 
must be provided.')
+
+    # Validate required parameters
+    required_params = {
+        'storage_account_url': storage_account_url,
+        'container_name': container_name,
+        'storage_account_type': storage_account_type,
+        'encryption_mode': encryption_mode,
+        'schema_file': schema_file,
+        'access_mode': access_mode,
+        'identity_name': identity_name,
+        'identity_client_id': identity_client_id,
+        'identity_tenant_id': identity_tenant_id,
+        'identity_issuer_url': identity_issuer_url
+    }
+
+    missing = [k for k, v in required_params.items() if v is None]
+    if missing:
+        raise CLIError('Missing required parameters: ' + ", ".join("--" + k.replace("_", "-") for k in missing))
+
+    # Validate CPK parameters if encryption_mode is CPK
+    if encryption_mode and encryption_mode.upper() == 'CPK':
+        cpk_params = {
+            'dek_keyvault_url': dek_keyvault_url,
+            'dek_secret_id': dek_secret_id,
+            'kek_keyvault_url': kek_keyvault_url,
+            'kek_secret_id': kek_secret_id,
+            'kek_maa_url': kek_maa_url
+        }
+        missing_cpk = [k for k, v in cpk_params.items() if v is None]
+        if missing_cpk:
+            raise CLIError('CPK encryption mode requires: ' + ", ".join("--" + k.replace("_", "-") for k in missing_cpk))
+
+    # Load schema from file
+    schema_content = None
+    if schema_file.startswith('@'):
+        schema_path = schema_file[1:]
+        try:
+            with open(schema_path, 'r') as f:
+                schema_content = json.load(f)
+        except FileNotFoundError:
+            raise CLIError(f'Schema file not found: {schema_path}')
+        except json.JSONDecodeError as e:
+            raise CLIError(f'Invalid JSON in schema file: {str(e)}')
+    else:
+        raise CLIError('--schema-file must be a file path prefixed with @ (e.g., @schema.json)')
+
+    # Override format if provided
+    if schema_format:
+        schema_content['format'] = schema_format
+
+    # Build datasetAccessPolicy
+    dataset_access_policy = {
+        'accessMode': access_mode
+    }
+    if allowed_fields:
+        dataset_access_policy['allowedFields'] = [f.strip() for f in allowed_fields.split(',')]
+
+    # Build store configuration
+    store = {
+ 
'storageAccountUrl': storage_account_url, + 'containerName': container_name, + 'storageAccountType': storage_account_type, + 'encryptionMode': encryption_mode + } + + # Build identity + identity = { + 'name': identity_name, + 'clientId': identity_client_id, + 'tenantId': identity_tenant_id, + 'issuerUrl': identity_issuer_url + } + + # Construct final body + body = { + 'name': document_id, + 'datasetSchema': schema_content, + 'datasetAccessPolicy': dataset_access_policy, + 'store': store, + 'identity': identity + } + + # Add DEK/KEK for CPK mode + if encryption_mode and encryption_mode.upper() == 'CPK': + body['dek'] = { + 'keyVaultUrl': dek_keyvault_url, + 'secretId': dek_secret_id + } + body['kek'] = { + 'keyVaultUrl': kek_keyvault_url, + 'secretId': kek_secret_id, + 'maaUrl': kek_maa_url + } client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_datasets_document_id_publish_post( @@ -340,32 +488,128 @@ def frontend_collaboration_query_show( def frontend_collaboration_query_publish( - cmd, collaboration_id, document_id, body, api_version=None): + cmd, collaboration_id, document_id, + body=None, + query_segment=None, + execution_sequence=None, + input_datasets=None, + output_dataset=None, + api_version=None): """Publish a query :param cmd: CLI command context :param collaboration_id: Collaboration identifier :param document_id: Query document identifier - :param body: Publish configuration JSON (string, dict, or @file) + :param body: Publish configuration JSON (string, dict, or @file) - legacy mode + :param query_segment: List of query segments (repeatable, @file.sql or inline SQL) + :param execution_sequence: Comma-separated execution sequence numbers + :param input_datasets: Comma-separated input datasets (datasetId:viewName pairs) + :param output_dataset: Output dataset (datasetId:viewName) :param api_version: API version to use for this request :return: Publish result """ import json + from azure.cli.core.util import 
CLIError - # Handle body parameter - convert string to dict if needed - if isinstance(body, str): - body = json.loads(body) + # Check for mutual exclusion: body vs parameters + has_params = any([query_segment, execution_sequence, input_datasets, output_dataset]) + + if body and has_params: + raise CLIError('Cannot use --body together with individual parameters. Use either --body or the parameter flags.') + + # Legacy mode: use body directly + if body: + if isinstance(body, str): + body = json.loads(body) + client = get_frontend_client(cmd, api_version=api_version) + return client.collaboration.analytics_queries_document_id_publish_post( + collaboration_id, document_id, body) + + # Parameter mode: construct body from parameters + if not has_params: + raise CLIError('Either --body or individual parameters (--query-segment, --execution-sequence, etc.) must be provided.') + + # Validate required parameters + if not query_segment: + raise CLIError('--query-segment is required (can be specified multiple times)') + if not execution_sequence: + raise CLIError('--execution-sequence is required') + if not input_datasets: + raise CLIError('--input-datasets is required') + if not output_dataset: + raise CLIError('--output-dataset is required') + + # Parse query segments + segments = [] + for seg in query_segment: + if seg.startswith('@'): + # Load from file + file_path = seg[1:] + try: + with open(file_path, 'r') as f: + segments.append(f.read()) + except FileNotFoundError: + raise CLIError(f'Query segment file not found: {file_path}') + else: + # Inline SQL + segments.append(seg) + + # Parse execution sequence + try: + exec_seq = [int(x.strip()) for x in execution_sequence.split(',')] + except ValueError: + raise CLIError('--execution-sequence must be comma-separated integers (e.g., "1,1,2")') + + # Validate segment count matches execution sequence count + if len(segments) != len(exec_seq): + raise CLIError(f'Number of query segments ({len(segments)}) must match execution 
sequence count ({len(exec_seq)})') + + # Build queryData array + query_data = [] + for sql, seq in zip(segments, exec_seq): + query_data.append({ + 'data': sql, + 'executionSequence': seq, + 'preConditions': '', + 'postFilters': '' + }) + + # Parse input datasets (comma-separated datasetId:viewName pairs) + input_ds_list = [] + for ds in input_datasets.split(','): + ds = ds.strip() + if ':' not in ds: + raise CLIError(f'Invalid input dataset format: {ds}. Expected format: datasetId:viewName') + dataset_id, view_name = ds.split(':', 1) + input_ds_list.append(f'{dataset_id.strip()}:{view_name.strip()}') + input_ds_str = ','.join(input_ds_list) + + # Parse output dataset + if ':' not in output_dataset: + raise CLIError(f'Invalid output dataset format: {output_dataset}. Expected format: datasetId:viewName') + + # Construct body + body = { + 'inputDatasets': input_ds_str, + 'outputDataset': output_dataset, + 'queryData': query_data + } client = get_frontend_client(cmd, api_version=api_version) return client.collaboration.analytics_queries_document_id_publish_post( collaboration_id, document_id, body) + def frontend_collaboration_query_run( cmd, collaboration_id, document_id, body=None, + dry_run=False, + start_date=None, + end_date=None, + use_optimizer=False, api_version=None): """Run a query @@ -374,11 +618,22 @@ def frontend_collaboration_query_run( :param document_id: Query document identifier :param body: Run configuration JSON (string, dict, or @file). 
Optional fields: runId (auto-generated if not provided), dryRun, startDate, endDate, useOptimizer + :param dry_run: Perform a dry run without executing the query + :param start_date: Start date for query execution + :param end_date: End date for query execution + :param use_optimizer: Use query optimizer :param api_version: API version to use for this request :return: Run result """ import json import uuid + from azure.cli.core.util import CLIError + + # Check for mutual exclusion: body vs parameters + has_params = any([dry_run, start_date, end_date, use_optimizer]) + + if body and has_params: + raise CLIError('Cannot use --body together with individual parameters. Use either --body or the parameter flags.') # Handle body parameter - convert string to dict if needed if body and isinstance(body, str): @@ -388,6 +643,17 @@ def frontend_collaboration_query_run( if not body: body = {} + # Add parameter values to body if in parameter mode + if has_params: + if dry_run: + body['dryRun'] = True + if start_date: + body['startDate'] = start_date + if end_date: + body['endDate'] = end_date + if use_optimizer: + body['useOptimizer'] = True + # Auto-generate runId if not provided if 'runId' not in body: body['runId'] = str(uuid.uuid4()) diff --git a/src/managedcleanroom/azext_managedcleanroom/_help.py b/src/managedcleanroom/azext_managedcleanroom/_help.py index a80dcbf9512..a8423a30cc9 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_help.py +++ b/src/managedcleanroom/azext_managedcleanroom/_help.py @@ -345,29 +345,115 @@ helps['managedcleanroom frontend analytics dataset publish'] = """ type: command - short-summary: Publish a dataset + short-summary: Publish a dataset to the collaboration long-summary: | - Publishes a dataset with configuration including dataset access point, - protection settings, encryption secrets, and identity configuration. 
- - The body parameter must contain a JSON object with: - - data.datasetAccessPoint: Dataset access configuration - - name: Access point name - - path: Dataset path - - protection: Protection configuration with proxyMode, proxyType, etc. + Publish a dataset configuration with storage, schema, access policy, and identity information. + Supports both SSE (Server-Side Encryption) and CPK (Customer-Provided Key) encryption modes. + You can use either individual parameters or a JSON body file for configuration. + parameters: + - name: --collaboration-id -c + type: string + short-summary: Collaboration identifier + - name: --document-id -d + type: string + short-summary: Dataset document identifier + - name: --body + type: string + short-summary: JSON configuration file path (@file.json) or JSON string (legacy mode) + - name: --storage-account-url + type: string + short-summary: Azure Storage account URL + - name: --container-name + type: string + short-summary: Blob container name + - name: --storage-account-type + type: string + short-summary: Storage account type (e.g., AzureStorageAccount) + - name: --encryption-mode + type: string + short-summary: Encryption mode (SSE or CPK) + - name: --schema-file + type: string + short-summary: Path to schema file (@path/to/schema.json) containing field definitions + - name: --schema-format + type: string + short-summary: Schema format (default is Delta) + - name: --access-mode + type: string + short-summary: Access mode (e.g., ReadWrite) + - name: --allowed-fields + type: string + short-summary: Comma-separated list of allowed field names + - name: --identity-name + type: string + short-summary: Managed identity name + - name: --identity-client-id + type: string + short-summary: Managed identity client ID (GUID) + - name: --identity-tenant-id + type: string + short-summary: Tenant ID (GUID) + - name: --identity-issuer-url + type: string + short-summary: OIDC issuer URL (HTTPS) + - name: --dek-keyvault-url + type: string + 
short-summary: Key Vault URL for DEK (CPK mode only) + - name: --dek-secret-id + type: string + short-summary: DEK secret ID (CPK mode only) + - name: --kek-keyvault-url + type: string + short-summary: Key Vault URL for KEK (CPK mode only) + - name: --kek-secret-id + type: string + short-summary: KEK secret ID (CPK mode only) + - name: --kek-maa-url + type: string + short-summary: MAA URL for KEK (CPK mode only) examples: - - name: Publish a dataset with configuration from file + - name: Publish a dataset using SSE encryption with individual parameters text: | az managedcleanroom frontend analytics dataset publish \ - --collaboration-id \ - --document-id \ - --body @publish-config.json - - name: Publish a dataset with inline JSON + --collaboration-id my-collab-123 \ + --document-id my-dataset \ + --storage-account-url https://mystorageaccount.blob.core.windows.net \ + --container-name datasets \ + --storage-account-type AzureStorageAccount \ + --encryption-mode SSE \ + --schema-file @schema.json \ + --access-mode ReadWrite \ + --allowed-fields "customer_id,revenue,date" \ + --identity-name northwind-identity \ + --identity-client-id fb907136-1234-5678-9abc-def012345678 \ + --identity-tenant-id 72f988bf-1234-5678-9abc-def012345678 \ + --identity-issuer-url https://oidc.example.com/issuer + - name: Publish a dataset using CPK encryption with individual parameters text: | az managedcleanroom frontend analytics dataset publish \ - --collaboration-id \ - --document-id \ - --body '{"data": {"datasetAccessPoint": {"name": "my-dataset", "path": "/data/path", "protection": {}}}}' + --collaboration-id my-collab-123 \ + --document-id my-dataset \ + --storage-account-url https://mystorageaccount.blob.core.windows.net \ + --container-name datasets \ + --storage-account-type AzureStorageAccount \ + --encryption-mode CPK \ + --schema-file @schema.json \ + --access-mode ReadWrite \ + --identity-name northwind-identity \ + --identity-client-id fb907136-1234-5678-9abc-def012345678 \ 
+ --identity-tenant-id 72f988bf-1234-5678-9abc-def012345678 \ + --identity-issuer-url https://oidc.example.com/issuer \ + --dek-keyvault-url https://mykeyvault.vault.azure.net \ + --dek-secret-id dek-secret-123 \ + --kek-keyvault-url https://mykeyvault.vault.azure.net \ + --kek-secret-id kek-secret-123 \ + --kek-maa-url https://sharedeus.eus.attest.azure.net + - name: Publish a dataset using a JSON body file (legacy mode) + text: | + az managedcleanroom frontend analytics dataset publish \ + --collaboration-id my-collab-123 \ + --document-id my-dataset \ + --body @dataset-config.json """ @@ -431,68 +517,116 @@ helps['managedcleanroom frontend analytics query publish'] = """ type: command - short-summary: Publish a query + short-summary: Publish a query to the collaboration long-summary: | - Publishes a query with configuration including input datasets, output dataset, - and query execution segments. - - The body parameter must contain a JSON object with: - - inputDatasets: Array of input dataset configurations - - outputDataset: Output dataset configuration - - queryData: Query execution data with segments + Publish a query configuration with SQL segments, execution sequence, and dataset mappings. + Query segments can be loaded from files or provided inline. The execution sequence defines + which segments run in parallel (same number) or sequentially (different numbers). + parameters: + - name: --collaboration-id -c + type: string + short-summary: Collaboration identifier + - name: --document-id -d + type: string + short-summary: Query document identifier + - name: --body + type: string + short-summary: JSON configuration file path (@file.json) or JSON string (legacy mode) + - name: --query-segment + type: string + short-summary: Query segment SQL (@file.sql or inline). Repeatable. Order matters. + - name: --execution-sequence + type: string + short-summary: Comma-separated execution sequence numbers (e.g., "1,1,2"). Must match segment count. 
+ - name: --input-datasets + type: string + short-summary: Comma-separated input datasets as datasetId:viewName pairs + - name: --output-dataset + type: string + short-summary: Output dataset as datasetId:viewName examples: - - name: Publish a query with configuration from file + - name: Publish a query with SQL segments from files text: | az managedcleanroom frontend analytics query publish \ - --collaboration-id \ - --document-id \ - --body @query-publish-config.json - - name: Publish a query with inline JSON + --collaboration-id my-collab-123 \ + --document-id my-query \ + --query-segment @segment1.sql \ + --query-segment @segment2.sql \ + --query-segment @segment3.sql \ + --execution-sequence "1,1,2" \ + --input-datasets "dataset1:view1,dataset2:view2" \ + --output-dataset "output-dataset:results" + - name: Publish a query with inline SQL text: | az managedcleanroom frontend analytics query publish \ - --collaboration-id \ - --document-id \ - --body '{"inputDatasets": [{"datasetDocumentId": "...", "view": "..."}], "outputDataset": {}, "queryData": {"segments": []}}' + --collaboration-id my-collab-123 \ + --document-id my-query \ + --query-segment "SELECT * FROM table1" \ + --query-segment "SELECT * FROM table2" \ + --execution-sequence "1,2" \ + --input-datasets "dataset1:view1" \ + --output-dataset "output-dataset:results" + - name: Publish a query using a JSON body file (legacy mode) + text: | + az managedcleanroom frontend analytics query publish \ + --collaboration-id my-collab-123 \ + --document-id my-query \ + --body @query-config.json """ helps['managedcleanroom frontend analytics query run'] = """ type: command - short-summary: Run a query + short-summary: Run a query in the collaboration long-summary: | - Executes a query against the collaboration's analytics workload. - The run configuration can be provided via --body parameter. - If runId is not specified in body, it will be auto-generated. 
+ Execute a published query with optional configuration parameters. + A run ID is automatically generated if not provided. parameters: + - name: --collaboration-id -c + type: string + short-summary: Collaboration identifier - name: --document-id -d type: string short-summary: Query document identifier - name: --body type: string - short-summary: Run configuration (JSON string or @file path) - long-summary: | - Optional JSON configuration containing: - - runId: Unique run identifier (auto-generated if not provided) - - dryRun: Boolean flag for dry run mode - - startDate: ISO-8601 formatted start date - - endDate: ISO-8601 formatted end date - - useOptimizer: Boolean flag to enable query optimizer + short-summary: JSON configuration file path (@file.json) or JSON string (legacy mode) + - name: --dry-run + type: bool + short-summary: Perform a dry run without executing the query + - name: --start-date + type: string + short-summary: Start date for query execution + - name: --end-date + type: string + short-summary: End date for query execution + - name: --use-optimizer + type: bool + short-summary: Use query optimizer examples: - - name: Run query with auto-generated run ID + - name: Run a query with default settings text: | az managedcleanroom frontend analytics query run \ - --collaboration-id \ - --document-id - - name: Run query with specific configuration + --collaboration-id my-collab-123 \ + --document-id my-query + - name: Run a query with dry run and date range text: | az managedcleanroom frontend analytics query run \ - --collaboration-id \ - --document-id \ - --body '{"runId": "my-run-001", "dryRun": false, "useOptimizer": true}' - - name: Run query with configuration from file + --collaboration-id my-collab-123 \ + --document-id my-query \ + --dry-run \ + --start-date "2024-01-01" \ + --end-date "2024-12-31" + - name: Run a query with optimizer enabled text: | az managedcleanroom frontend analytics query run \ - --collaboration-id \ - --document-id \ + 
--collaboration-id my-collab-123 \ + --document-id my-query \ + --use-optimizer + - name: Run a query using a JSON body file (legacy mode) + text: | + az managedcleanroom frontend analytics query run \ + --collaboration-id my-collab-123 \ + --document-id my-query \ --body @run-config.json """ diff --git a/src/managedcleanroom/azext_managedcleanroom/_params.py b/src/managedcleanroom/azext_managedcleanroom/_params.py index ce5a76bbe38..a48351b6040 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_params.py +++ b/src/managedcleanroom/azext_managedcleanroom/_params.py @@ -117,6 +117,35 @@ def load_arguments(self, _): # pylint: disable=unused-argument type=str, help='JSON string or @file path containing publish configuration. ' 'Must include datasetAccessPoint with name, path, and protection details.') + + # Storage parameters + c.argument('storage_account_url', options_list=['--storage-account-url'], help='Azure Storage account URL') + c.argument('container_name', options_list=['--container-name'], help='Blob container name') + c.argument('storage_account_type', options_list=['--storage-account-type'], help='Storage account type (e.g., AzureStorageAccount)') + c.argument('encryption_mode', options_list=['--encryption-mode'], help='Encryption mode: SSE or CPK') + + # Schema parameters + c.argument('schema_file', options_list=['--schema-file'], help='Path to schema file (@path/to/schema.json) containing field definitions') + c.argument('schema_format', options_list=['--schema-format'], help='Schema format (default: Delta)') + + # Access policy parameters + c.argument('access_mode', options_list=['--access-mode'], help='Access mode (e.g., ReadWrite)') + c.argument('allowed_fields', options_list=['--allowed-fields'], help='Comma-separated list of allowed field names') + + # Identity parameters + c.argument('identity_name', options_list=['--identity-name'], help='Managed identity name') + c.argument('identity_client_id', options_list=['--identity-client-id'], 
help='Managed identity client ID (GUID)') + c.argument('identity_tenant_id', options_list=['--identity-tenant-id'], help='Tenant ID (GUID)') + c.argument('identity_issuer_url', options_list=['--identity-issuer-url'], help='OIDC issuer URL (HTTPS)') + + # CPK DEK parameters + c.argument('dek_keyvault_url', options_list=['--dek-keyvault-url'], help='Key Vault URL for DEK (CPK mode only)') + c.argument('dek_secret_id', options_list=['--dek-secret-id'], help='DEK secret ID (CPK mode only)') + + # CPK KEK parameters + c.argument('kek_keyvault_url', options_list=['--kek-keyvault-url'], help='Key Vault URL for KEK (CPK mode only)') + c.argument('kek_secret_id', options_list=['--kek-secret-id'], help='KEK secret ID (CPK mode only)') + c.argument('kek_maa_url', options_list=['--kek-maa-url'], help='MAA URL for KEK (CPK mode only)') # Dataset queries context with self.argument_context('managedcleanroom frontend analytics dataset queries') as c: @@ -144,12 +173,22 @@ def load_arguments(self, _): # pylint: disable=unused-argument type=str, help='JSON string or @file path containing publish configuration. ' 'Must include inputDatasets, outputDataset, and queryData.') + + c.argument('query_segment', options_list=['--query-segment'], action='append', help='Query segment SQL (@file.sql or inline). Repeatable. Order matters.') + c.argument('execution_sequence', options_list=['--execution-sequence'], help='Comma-separated execution sequence numbers (e.g., "1,1,2"). Must match segment count.') + c.argument('input_datasets', options_list=['--input-datasets'], help='Comma-separated input datasets as datasetId:viewName pairs') + c.argument('output_dataset', options_list=['--output-dataset'], help='Output dataset as datasetId:viewName') with self.argument_context('managedcleanroom frontend analytics query run') as c: c.argument('document_id', document_id_type) c.argument( 'body', type=str, help='JSON string or @file path containing run configuration. 
' 'Optional fields: runId (auto-generated if not provided), dryRun, startDate, endDate, useOptimizer.') + + c.argument('dry_run', options_list=['--dry-run'], action='store_true', help='Perform a dry run without executing the query') + c.argument('start_date', options_list=['--start-date'], help='Start date for query execution') + c.argument('end_date', options_list=['--end-date'], help='End date for query execution') + c.argument('use_optimizer', options_list=['--use-optimizer'], action='store_true', help='Use query optimizer') # Query vote context (unified) with self.argument_context('managedcleanroom frontend analytics query vote') as c: diff --git a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_dataset.py b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_dataset.py index 36c3069f9af..818b9593b39 100644 --- a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_dataset.py +++ b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_dataset.py @@ -8,6 +8,7 @@ Tests dataset list, show, and publish commands from _frontend_custom.py. 
""" +import json import unittest from unittest.mock import Mock, patch from azext_managedcleanroom._frontend_custom import ( @@ -134,6 +135,202 @@ def test_publish_dataset_failure(self, mock_get_client): self.assertIn("validation failed", str(context.exception)) + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_publish_dataset_with_sse_parameters(self, mock_get_client): + """Test publishing a dataset using SSE parameters""" + # Mock publish response + mock_publish_response = { + "datasetId": "test-dataset-123", + "status": "published", + "publishedAt": "2024-01-01T00:00:00Z" + } + mock_client = Mock() + mock_client.collaboration.analytics_datasets_document_id_publish_post.return_value = mock_publish_response + mock_get_client.return_value = mock_client + + # Mock file reading + test_schema = { + "fields": [ + {"fieldName": "customer_id", "fieldType": "string"}, + {"fieldName": "revenue", "fieldType": "decimal"} + ], + "format": "Delta" + } + + with patch('builtins.open', unittest.mock.mock_open(read_data=json.dumps(test_schema))): + # Execute with SSE parameters + result = frontend_collaboration_dataset_publish( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-dataset-123", + body=None, + storage_account_url="https://mystorageaccount.blob.core.windows.net", + container_name="datasets", + storage_account_type="AzureStorageAccount", + encryption_mode="SSE", + schema_file="@schema.json", + schema_format=None, + access_mode="ReadWrite", + allowed_fields="customer_id,revenue", + identity_name="northwind-identity", + identity_client_id="fb907136-1234-5678-9abc-def012345678", + identity_tenant_id="72f988bf-1234-5678-9abc-def012345678", + identity_issuer_url="https://oidc.example.com/issuer", + dek_keyvault_url=None, + dek_secret_id=None, + kek_keyvault_url=None, + kek_secret_id=None, + kek_maa_url=None + ) + + # Verify + self.assertEqual(result["datasetId"], "test-dataset-123") + self.assertEqual(result["status"], 
"published") + + # Verify the body was constructed correctly + call_args = mock_client.collaboration.analytics_datasets_document_id_publish_post.call_args + body = call_args[0][2] + self.assertEqual(body["name"], "test-dataset-123") + self.assertEqual(body["store"]["storageAccountUrl"], "https://mystorageaccount.blob.core.windows.net") + self.assertEqual(body["store"]["encryptionMode"], "SSE") + self.assertEqual(body["identity"]["name"], "northwind-identity") + self.assertNotIn("dek", body) + self.assertNotIn("kek", body) + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_publish_dataset_with_cpk_parameters(self, mock_get_client): + """Test publishing a dataset using CPK parameters""" + # Mock publish response + mock_publish_response = { + "datasetId": "test-dataset-cpk", + "status": "published", + "publishedAt": "2024-01-01T00:00:00Z" + } + mock_client = Mock() + mock_client.collaboration.analytics_datasets_document_id_publish_post.return_value = mock_publish_response + mock_get_client.return_value = mock_client + + # Mock file reading + test_schema = { + "fields": [{"fieldName": "id", "fieldType": "string"}], + "format": "Delta" + } + + with patch('builtins.open', unittest.mock.mock_open(read_data=json.dumps(test_schema))): + # Execute with CPK parameters + result = frontend_collaboration_dataset_publish( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-dataset-cpk", + body=None, + storage_account_url="https://mystorageaccount.blob.core.windows.net", + container_name="datasets", + storage_account_type="AzureStorageAccount", + encryption_mode="CPK", + schema_file="@schema.json", + schema_format=None, + access_mode="ReadWrite", + allowed_fields=None, + identity_name="northwind-identity", + identity_client_id="fb907136-1234-5678-9abc-def012345678", + identity_tenant_id="72f988bf-1234-5678-9abc-def012345678", + identity_issuer_url="https://oidc.example.com/issuer", + 
dek_keyvault_url="https://mykeyvault.vault.azure.net", + dek_secret_id="dek-secret-123", + kek_keyvault_url="https://mykeyvault.vault.azure.net", + kek_secret_id="kek-secret-123", + kek_maa_url="https://sharedeus.eus.attest.azure.net" + ) + + # Verify + self.assertEqual(result["datasetId"], "test-dataset-cpk") + self.assertEqual(result["status"], "published") + + # Verify CPK fields are present in body + call_args = mock_client.collaboration.analytics_datasets_document_id_publish_post.call_args + body = call_args[0][2] + self.assertIn("dek", body) + self.assertEqual(body["dek"]["keyVaultUrl"], "https://mykeyvault.vault.azure.net") + self.assertIn("kek", body) + self.assertEqual(body["kek"]["secretId"], "kek-secret-123") + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_publish_dataset_mutual_exclusion(self, mock_get_client): + """Test that body and parameters are mutually exclusive""" + from azure.cli.core.util import CLIError + + mock_client = Mock() + mock_get_client.return_value = mock_client + + # Execute with both body and parameters - should raise error + with self.assertRaises(CLIError) as context: + frontend_collaboration_dataset_publish( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-dataset-123", + body={"data": "test"}, + storage_account_url="https://mystorageaccount.blob.core.windows.net", + container_name="datasets" + ) + + self.assertIn("Cannot use --body together with individual parameters", str(context.exception)) + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_publish_dataset_missing_required_parameters(self, mock_get_client): + """Test validation of required parameters""" + from azure.cli.core.util import CLIError + + mock_client = Mock() + mock_get_client.return_value = mock_client + + # Execute with missing required parameters - should raise error + with self.assertRaises(CLIError) as context: + frontend_collaboration_dataset_publish( + cmd=Mock(), + 
collaboration_id="test-collab-123", + document_id="test-dataset-123", + body=None, + storage_account_url="https://mystorageaccount.blob.core.windows.net", + # Missing container_name and other required params + ) + + self.assertIn("Missing required parameters", str(context.exception)) + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_publish_dataset_cpk_missing_keys(self, mock_get_client): + """Test CPK mode requires DEK/KEK parameters""" + from azure.cli.core.util import CLIError + + mock_client = Mock() + mock_get_client.return_value = mock_client + + # Mock file reading + test_schema = {"fields": [], "format": "Delta"} + + with patch('builtins.open', unittest.mock.mock_open(read_data=json.dumps(test_schema))): + # Execute CPK mode without DEK/KEK params - should raise error + with self.assertRaises(CLIError) as context: + frontend_collaboration_dataset_publish( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-dataset-123", + body=None, + storage_account_url="https://mystorageaccount.blob.core.windows.net", + container_name="datasets", + storage_account_type="AzureStorageAccount", + encryption_mode="CPK", + schema_file="@schema.json", + access_mode="ReadWrite", + identity_name="northwind-identity", + identity_client_id="fb907136-1234-5678-9abc-def012345678", + identity_tenant_id="72f988bf-1234-5678-9abc-def012345678", + identity_issuer_url="https://oidc.example.com/issuer" + # Missing DEK/KEK params + ) + + self.assertIn("CPK encryption mode requires", str(context.exception)) + + if __name__ == '__main__': unittest.main() diff --git a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py index d5718a92b71..be71e8133f1 100644 --- a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py +++ b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py @@ 
-257,6 +257,258 @@ def test_show_query_run_result(self, mock_get_client): mock_client.collaboration.analytics_queries_jobid_get.assert_called_once_with( "test-collab-123", "test-job-123") + # Query Publish with Parameters Tests + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_publish_query_with_parameters_from_files(self, mock_get_client): + """Test publishing a query with SQL segments from files""" + # Mock the client + mock_client = Mock() + mock_client.collaboration.analytics_queries_document_id_publish_post.return_value = { + "queryId": "test-query-123", + "status": "published" + } + mock_get_client.return_value = mock_client + + # Mock file reading for SQL segments + sql_segment_1 = "SELECT * FROM table1" + sql_segment_2 = "SELECT * FROM table2" + sql_segment_3 = "SELECT * FROM table3" + + def mock_open_handler(filename, mode='r'): + content = { + 'segment1.sql': sql_segment_1, + 'segment2.sql': sql_segment_2, + 'segment3.sql': sql_segment_3 + } + file_content = content.get(filename, "") + return unittest.mock.mock_open(read_data=file_content)() + + with patch('builtins.open', side_effect=mock_open_handler): + # Execute + result = frontend_collaboration_query_publish( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-query-123", + body=None, + query_segment=["@segment1.sql", "@segment2.sql", "@segment3.sql"], + execution_sequence="1,1,2", + input_datasets="dataset1:view1,dataset2:view2", + output_dataset="output-dataset:results" + ) + + # Verify + self.assertEqual(result["queryId"], "test-query-123") + self.assertEqual(result["status"], "published") + + # Verify body construction + call_args = mock_client.collaboration.analytics_queries_document_id_publish_post.call_args + body = call_args[0][2] + self.assertEqual(body["inputDatasets"], "dataset1:view1,dataset2:view2") + self.assertEqual(body["outputDataset"], "output-dataset:results") + self.assertEqual(len(body["queryData"]), 3) + 
self.assertEqual(body["queryData"][0]["data"], sql_segment_1) + self.assertEqual(body["queryData"][0]["executionSequence"], 1) + self.assertEqual(body["queryData"][2]["executionSequence"], 2) + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_publish_query_with_inline_sql(self, mock_get_client): + """Test publishing a query with inline SQL segments""" + # Mock the client + mock_client = Mock() + mock_client.collaboration.analytics_queries_document_id_publish_post.return_value = { + "queryId": "test-query-inline", + "status": "published" + } + mock_get_client.return_value = mock_client + + # Execute with inline SQL + result = frontend_collaboration_query_publish( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-query-inline", + body=None, + query_segment=["SELECT * FROM table1", "SELECT * FROM table2"], + execution_sequence="1,2", + input_datasets="dataset1:view1", + output_dataset="output-dataset:results" + ) + + # Verify + self.assertEqual(result["status"], "published") + + # Verify body construction + call_args = mock_client.collaboration.analytics_queries_document_id_publish_post.call_args + body = call_args[0][2] + self.assertEqual(len(body["queryData"]), 2) + self.assertEqual(body["queryData"][0]["data"], "SELECT * FROM table1") + self.assertEqual(body["queryData"][1]["executionSequence"], 2) + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_publish_query_mutual_exclusion(self, mock_get_client): + """Test that body and parameters are mutually exclusive for query publish""" + from azure.cli.core.util import CLIError + + mock_client = Mock() + mock_get_client.return_value = mock_client + + # Execute with both body and parameters - should raise error + with self.assertRaises(CLIError) as context: + frontend_collaboration_query_publish( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-query-123", + body={"queryData": []}, + query_segment=["SELECT * 
FROM table1"] + ) + + self.assertIn("Cannot use --body together with individual parameters", str(context.exception)) + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_publish_query_segment_count_mismatch(self, mock_get_client): + """Test validation when segment count doesn't match execution sequence count""" + from azure.cli.core.util import CLIError + + mock_client = Mock() + mock_get_client.return_value = mock_client + + # Execute with mismatched counts - should raise error + with self.assertRaises(CLIError) as context: + frontend_collaboration_query_publish( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-query-123", + body=None, + query_segment=["SELECT * FROM table1", "SELECT * FROM table2"], + execution_sequence="1,2,3", # 3 numbers for 2 segments + input_datasets="dataset1:view1", + output_dataset="output-dataset:results" + ) + + self.assertIn("must match execution sequence count", str(context.exception)) + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_publish_query_invalid_dataset_format(self, mock_get_client): + """Test validation of dataset ID:view format""" + from azure.cli.core.util import CLIError + + mock_client = Mock() + mock_get_client.return_value = mock_client + + # Execute with invalid dataset format - should raise error + with self.assertRaises(CLIError) as context: + frontend_collaboration_query_publish( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-query-123", + body=None, + query_segment=["SELECT * FROM table1"], + execution_sequence="1", + input_datasets="dataset1", # Missing :viewName + output_dataset="output-dataset:results" + ) + + self.assertIn("Invalid input dataset format", str(context.exception)) + + # Query Run with Parameters Tests + + @patch('uuid.uuid4') + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_run_query_with_parameters(self, mock_get_client, mock_uuid4): + """Test 
running a query with individual parameters""" + # Mock UUID generation + mock_uuid4.return_value = "generated-run-id-456" + + # Mock the client + mock_client = Mock() + mock_client.collaboration.analytics_queries_document_id_run_post.return_value = { + "runId": "generated-run-id-456", + "status": "running" + } + mock_get_client.return_value = mock_client + + # Execute with parameters + result = frontend_collaboration_query_run( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-query-123", + body=None, + dry_run=True, + start_date="2024-01-01", + end_date="2024-12-31", + use_optimizer=True + ) + + # Verify + self.assertEqual(result["runId"], "generated-run-id-456") + + # Verify body was constructed with parameters + call_args = mock_client.collaboration.analytics_queries_document_id_run_post.call_args + body = call_args[1]["body"] + self.assertEqual(body["dryRun"], True) + self.assertEqual(body["startDate"], "2024-01-01") + self.assertEqual(body["endDate"], "2024-12-31") + self.assertEqual(body["useOptimizer"], True) + self.assertEqual(body["runId"], "generated-run-id-456") + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_run_query_mutual_exclusion(self, mock_get_client): + """Test that body and parameters are mutually exclusive for query run""" + from azure.cli.core.util import CLIError + + mock_client = Mock() + mock_get_client.return_value = mock_client + + # Execute with both body and parameters - should raise error + with self.assertRaises(CLIError) as context: + frontend_collaboration_query_run( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-query-123", + body={"runId": "test-run-id"}, + dry_run=True + ) + + self.assertIn("Cannot use --body together with individual parameters", str(context.exception)) + + @patch('uuid.uuid4') + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_run_query_with_partial_parameters(self, mock_get_client, mock_uuid4): + 
"""Test running a query with only some optional parameters""" + # Mock UUID generation + mock_uuid4.return_value = "generated-run-id-789" + + # Mock the client + mock_client = Mock() + mock_client.collaboration.analytics_queries_document_id_run_post.return_value = { + "runId": "generated-run-id-789", + "status": "running" + } + mock_get_client.return_value = mock_client + + # Execute with only dry_run parameter + result = frontend_collaboration_query_run( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-query-123", + body=None, + dry_run=True, + start_date=None, + end_date=None, + use_optimizer=False + ) + + # Verify + self.assertEqual(result["status"], "running") + + # Verify only dry_run is in body (not False boolean values) + call_args = mock_client.collaboration.analytics_queries_document_id_run_post.call_args + body = call_args[1]["body"] + self.assertEqual(body["dryRun"], True) + self.assertNotIn("startDate", body) + self.assertNotIn("endDate", body) + self.assertNotIn("useOptimizer", body) + if __name__ == '__main__': unittest.main() From 8715d0ddccdf885040c070b0a0433373bd415182 Mon Sep 17 00:00:00 2001 From: Saksham Garg Date: Fri, 20 Mar 2026 17:48:53 +0530 Subject: [PATCH 06/10] linter and styling fixes --- .../_frontend_custom.py | 55 +++++-- .../azext_managedcleanroom/_params.py | 147 ++++++++++++++---- 2 files changed, 156 insertions(+), 46 deletions(-) diff --git a/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py b/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py index 7210f9b1a47..2b5248f2334 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py +++ b/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py @@ -226,6 +226,7 @@ def frontend_collaboration_dataset_show( collaboration_id, document_id) +# pylint: disable=too-many-locals def frontend_collaboration_dataset_publish( cmd, collaboration_id, document_id, body=None, @@ -284,7 +285,9 @@ def 
frontend_collaboration_dataset_publish( ]) if body and has_params: - raise CLIError('Cannot use --body together with individual parameters. Use either --body or the parameter flags.') + raise CLIError( + 'Cannot use --body together with individual parameters. ' + 'Use either --body or the parameter flags.') # Legacy mode: use body directly if body: @@ -296,7 +299,9 @@ def frontend_collaboration_dataset_publish( # Parameter mode: construct body from parameters if not has_params: - raise CLIError('Either --body or individual parameters (--storage-account-url, --container-name, etc.) must be provided.') + raise CLIError( + 'Either --body or individual parameters (--storage-account-url, ' + '--container-name, etc.) must be provided.') # Validate required parameters required_params = { @@ -314,7 +319,8 @@ def frontend_collaboration_dataset_publish( missing = [k for k, v in required_params.items() if v is None] if missing: - raise CLIError(f'Missing required parameters: {", ".join(f"--{k.replace("_", "-")}" for k in missing)}') + missing_params = ", ".join(f"--{k.replace('_', '-')}" for k in missing) + raise CLIError(f'Missing required parameters: {missing_params}') # Validate CPK parameters if encryption_mode is CPK if encryption_mode and encryption_mode.upper() == 'CPK': @@ -327,7 +333,10 @@ def frontend_collaboration_dataset_publish( } missing_cpk = [k for k, v in cpk_params.items() if v is None] if missing_cpk: - raise CLIError(f'CPK encryption mode requires: {", ".join(f"--{k.replace("_", "-")}" for k in missing_cpk)}') + missing_cpk_params = ", ".join( + f"--{k.replace('_', '-')}" for k in missing_cpk) + raise CLIError( + f'CPK encryption mode requires: {missing_cpk_params}') # Load schema from file schema_content = None @@ -487,6 +496,7 @@ def frontend_collaboration_query_show( collaboration_id, document_id) +# pylint: disable=too-many-locals def frontend_collaboration_query_publish( cmd, collaboration_id, document_id, body=None, @@ -512,10 +522,13 @@ def 
frontend_collaboration_query_publish( from azure.cli.core.util import CLIError # Check for mutual exclusion: body vs parameters - has_params = any([query_segment, execution_sequence, input_datasets, output_dataset]) + has_params = any([ + query_segment, execution_sequence, input_datasets, output_dataset]) if body and has_params: - raise CLIError('Cannot use --body together with individual parameters. Use either --body or the parameter flags.') + raise CLIError( + 'Cannot use --body together with individual parameters. ' + 'Use either --body or the parameter flags.') # Legacy mode: use body directly if body: @@ -527,11 +540,14 @@ def frontend_collaboration_query_publish( # Parameter mode: construct body from parameters if not has_params: - raise CLIError('Either --body or individual parameters (--query-segment, --execution-sequence, etc.) must be provided.') + raise CLIError( + 'Either --body or individual parameters (--query-segment, ' + '--execution-sequence, etc.) must be provided.') # Validate required parameters if not query_segment: - raise CLIError('--query-segment is required (can be specified multiple times)') + raise CLIError( + '--query-segment is required (can be specified multiple times)') if not execution_sequence: raise CLIError('--execution-sequence is required') if not input_datasets: @@ -558,11 +574,15 @@ def frontend_collaboration_query_publish( try: exec_seq = [int(x.strip()) for x in execution_sequence.split(',')] except ValueError: - raise CLIError('--execution-sequence must be comma-separated integers (e.g., "1,1,2")') + raise CLIError( + '--execution-sequence must be comma-separated integers ' + '(e.g., "1,1,2")') # Validate segment count matches execution sequence count if len(segments) != len(exec_seq): - raise CLIError(f'Number of query segments ({len(segments)}) must match execution sequence count ({len(exec_seq)})') + raise CLIError( + f'Number of query segments ({len(segments)}) must match ' + f'execution sequence count 
({len(exec_seq)})') # Build queryData array query_data = [] @@ -579,15 +599,19 @@ def frontend_collaboration_query_publish( for ds in input_datasets.split(','): ds = ds.strip() if ':' not in ds: - raise CLIError(f'Invalid input dataset format: {ds}. Expected format: datasetId:viewName') + raise CLIError( + f'Invalid input dataset format: {ds}. ' + f'Expected format: datasetId:viewName') dataset_id, view_name = ds.split(':', 1) input_ds_list.append(f'{dataset_id.strip()}:{view_name.strip()}') input_ds_str = ','.join(input_ds_list) # Parse output dataset if ':' not in output_dataset: - raise CLIError(f'Invalid output dataset format: {output_dataset}. Expected format: datasetId:viewName') - + raise CLIError( + f'Invalid output dataset format: {output_dataset}. ' + f'Expected format: datasetId:viewName') + # Construct body body = { 'inputDatasets': input_ds_str, @@ -600,7 +624,6 @@ def frontend_collaboration_query_publish( collaboration_id, document_id, body) - def frontend_collaboration_query_run( cmd, collaboration_id, @@ -633,7 +656,9 @@ def frontend_collaboration_query_run( has_params = any([dry_run, start_date, end_date, use_optimizer]) if body and has_params: - raise CLIError('Cannot use --body together with individual parameters. Use either --body or the parameter flags.') + raise CLIError( + 'Cannot use --body together with individual parameters. ' + 'Use either --body or the parameter flags.') # Handle body parameter - convert string to dict if needed if body and isinstance(body, str): diff --git a/src/managedcleanroom/azext_managedcleanroom/_params.py b/src/managedcleanroom/azext_managedcleanroom/_params.py index a48351b6040..1cb8dc3f74b 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_params.py +++ b/src/managedcleanroom/azext_managedcleanroom/_params.py @@ -116,36 +116,89 @@ def load_arguments(self, _): # pylint: disable=unused-argument 'body', type=str, help='JSON string or @file path containing publish configuration. 
' - 'Must include datasetAccessPoint with name, path, and protection details.') - + 'Must include datasetAccessPoint with name, path, and ' + 'protection details.') + # Storage parameters - c.argument('storage_account_url', options_list=['--storage-account-url'], help='Azure Storage account URL') - c.argument('container_name', options_list=['--container-name'], help='Blob container name') - c.argument('storage_account_type', options_list=['--storage-account-type'], help='Storage account type (e.g., AzureStorageAccount)') - c.argument('encryption_mode', options_list=['--encryption-mode'], help='Encryption mode: SSE or CPK') + c.argument( + 'storage_account_url', + options_list=['--storage-account-url'], + help='Azure Storage account URL') + c.argument( + 'container_name', + options_list=['--container-name'], + help='Blob container name') + c.argument( + 'storage_account_type', + options_list=['--storage-account-type'], + help='Storage account type (e.g., AzureStorageAccount)') + c.argument( + 'encryption_mode', + options_list=['--encryption-mode'], + help='Encryption mode: SSE or CPK') # Schema parameters - c.argument('schema_file', options_list=['--schema-file'], help='Path to schema file (@path/to/schema.json) containing field definitions') - c.argument('schema_format', options_list=['--schema-format'], help='Schema format (default: Delta)') + c.argument( + 'schema_file', + options_list=['--schema-file'], + help='Path to schema file (@path/to/schema.json) containing ' + 'field definitions') + c.argument( + 'schema_format', + options_list=['--schema-format'], + help='Schema format (default: Delta)') # Access policy parameters - c.argument('access_mode', options_list=['--access-mode'], help='Access mode (e.g., ReadWrite)') - c.argument('allowed_fields', options_list=['--allowed-fields'], help='Comma-separated list of allowed field names') + c.argument( + 'access_mode', + options_list=['--access-mode'], + help='Access mode (e.g., ReadWrite)') + c.argument( + 
'allowed_fields', + options_list=['--allowed-fields'], + help='Comma-separated list of allowed field names') # Identity parameters - c.argument('identity_name', options_list=['--identity-name'], help='Managed identity name') - c.argument('identity_client_id', options_list=['--identity-client-id'], help='Managed identity client ID (GUID)') - c.argument('identity_tenant_id', options_list=['--identity-tenant-id'], help='Tenant ID (GUID)') - c.argument('identity_issuer_url', options_list=['--identity-issuer-url'], help='OIDC issuer URL (HTTPS)') + c.argument( + 'identity_name', + options_list=['--identity-name'], + help='Managed identity name') + c.argument( + 'identity_client_id', + options_list=['--identity-client-id'], + help='Managed identity client ID (GUID)') + c.argument( + 'identity_tenant_id', + options_list=['--identity-tenant-id'], + help='Tenant ID (GUID)') + c.argument( + 'identity_issuer_url', + options_list=['--identity-issuer-url'], + help='OIDC issuer URL (HTTPS)') # CPK DEK parameters - c.argument('dek_keyvault_url', options_list=['--dek-keyvault-url'], help='Key Vault URL for DEK (CPK mode only)') - c.argument('dek_secret_id', options_list=['--dek-secret-id'], help='DEK secret ID (CPK mode only)') + c.argument( + 'dek_keyvault_url', + options_list=['--dek-keyvault-url'], + help='Key Vault URL for DEK (CPK mode only)') + c.argument( + 'dek_secret_id', + options_list=['--dek-secret-id'], + help='DEK secret ID (CPK mode only)') # CPK KEK parameters - c.argument('kek_keyvault_url', options_list=['--kek-keyvault-url'], help='Key Vault URL for KEK (CPK mode only)') - c.argument('kek_secret_id', options_list=['--kek-secret-id'], help='KEK secret ID (CPK mode only)') - c.argument('kek_maa_url', options_list=['--kek-maa-url'], help='MAA URL for KEK (CPK mode only)') + c.argument( + 'kek_keyvault_url', + options_list=['--kek-keyvault-url'], + help='Key Vault URL for KEK (CPK mode only)') + c.argument( + 'kek_secret_id', + options_list=['--kek-secret-id'], + 
help='KEK secret ID (CPK mode only)') + c.argument( + 'kek_maa_url', + options_list=['--kek-maa-url'], + help='MAA URL for KEK (CPK mode only)') # Dataset queries context with self.argument_context('managedcleanroom frontend analytics dataset queries') as c: @@ -173,22 +226,54 @@ def load_arguments(self, _): # pylint: disable=unused-argument type=str, help='JSON string or @file path containing publish configuration. ' 'Must include inputDatasets, outputDataset, and queryData.') - - c.argument('query_segment', options_list=['--query-segment'], action='append', help='Query segment SQL (@file.sql or inline). Repeatable. Order matters.') - c.argument('execution_sequence', options_list=['--execution-sequence'], help='Comma-separated execution sequence numbers (e.g., "1,1,2"). Must match segment count.') - c.argument('input_datasets', options_list=['--input-datasets'], help='Comma-separated input datasets as datasetId:viewName pairs') - c.argument('output_dataset', options_list=['--output-dataset'], help='Output dataset as datasetId:viewName') + + c.argument( + 'query_segment', + options_list=['--query-segment'], + action='append', + help='Query segment SQL (@file.sql or inline). ' + 'Repeatable. Order matters.') + c.argument( + 'execution_sequence', + options_list=['--execution-sequence'], + help='Comma-separated execution sequence numbers (e.g., "1,1,2"). ' + 'Must match segment count.') + c.argument( + 'input_datasets', + options_list=['--input-datasets'], + help='Comma-separated input datasets as datasetId:viewName pairs') + c.argument( + 'output_dataset', + options_list=['--output-dataset'], + help='Output dataset as datasetId:viewName') with self.argument_context('managedcleanroom frontend analytics query run') as c: c.argument('document_id', document_id_type) c.argument( - 'body', type=str, help='JSON string or @file path containing run configuration. 
' - 'Optional fields: runId (auto-generated if not provided), dryRun, startDate, endDate, useOptimizer.') - - c.argument('dry_run', options_list=['--dry-run'], action='store_true', help='Perform a dry run without executing the query') - c.argument('start_date', options_list=['--start-date'], help='Start date for query execution') - c.argument('end_date', options_list=['--end-date'], help='End date for query execution') - c.argument('use_optimizer', options_list=['--use-optimizer'], action='store_true', help='Use query optimizer') + 'body', + type=str, + help='JSON string or @file path containing run configuration. ' + 'Optional fields: runId (auto-generated if not provided), ' + 'dryRun, startDate, endDate, useOptimizer.') + + c.argument( + 'dry_run', + options_list=['--dry-run'], + action='store_true', + help='Perform a dry run without executing the query') + c.argument( + 'start_date', + options_list=['--start-date'], + help='Start date for query execution') + c.argument( + 'end_date', + options_list=['--end-date'], + help='End date for query execution') + c.argument( + 'use_optimizer', + options_list=['--use-optimizer'], + action='store_true', + help='Use query optimizer') # Query vote context (unified) with self.argument_context('managedcleanroom frontend analytics query vote') as c: From 25db468a42d3f9a083e2b4f665cd0a04801ee84c Mon Sep 17 00:00:00 2001 From: Saksham Garg Date: Mon, 23 Mar 2026 13:48:15 +0530 Subject: [PATCH 07/10] update query segments to be json objects --- .../_frontend_custom.py | 104 +++++++++---- .../azext_managedcleanroom/_help.py | 42 ++--- .../azext_managedcleanroom/_params.py | 8 +- .../tests/latest/test_frontend_query.py | 144 ++++++++++++++++-- 4 files changed, 231 insertions(+), 67 deletions(-) diff --git a/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py b/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py index 2b5248f2334..5ca97afba3c 100644 --- 
a/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py +++ b/src/managedcleanroom/azext_managedcleanroom/_frontend_custom.py @@ -496,7 +496,7 @@ def frontend_collaboration_query_show( collaboration_id, document_id) -# pylint: disable=too-many-locals +# pylint: disable=too-many-locals,too-many-branches def frontend_collaboration_query_publish( cmd, collaboration_id, document_id, body=None, @@ -548,51 +548,89 @@ def frontend_collaboration_query_publish( if not query_segment: raise CLIError( '--query-segment is required (can be specified multiple times)') - if not execution_sequence: - raise CLIError('--execution-sequence is required') if not input_datasets: raise CLIError('--input-datasets is required') if not output_dataset: raise CLIError('--output-dataset is required') - # Parse query segments - segments = [] - for seg in query_segment: - if seg.startswith('@'): - # Load from file + # Parse query segments - detect mode (FILE vs INLINE) + file_segments = [s for s in query_segment if s.startswith('@')] + inline_segments = [s for s in query_segment if not s.startswith('@')] + + # Cannot mix file and inline segments + if file_segments and inline_segments: + raise CLIError( + 'Cannot mix @file.json and inline SQL segments. ' + 'Either use all @file.json segments or all inline SQL strings.') + + query_data = [] + + if file_segments: + # FILE mode: segments are JSON files with full object structure + if execution_sequence: + raise CLIError( + '--execution-sequence must not be provided when using ' + '@file.json segments. 
Include executionSequence inside ' + 'each JSON file.') + + for seg in file_segments: file_path = seg[1:] try: with open(file_path, 'r') as f: - segments.append(f.read()) + segment_obj = json.load(f) except FileNotFoundError: raise CLIError(f'Query segment file not found: {file_path}') - else: - # Inline SQL - segments.append(seg) + except json.JSONDecodeError as e: + raise CLIError( + f'Invalid JSON in segment file {file_path}: {str(e)}') + + # Validate required fields + if 'data' not in segment_obj: + raise CLIError( + f'Segment file {file_path} must contain "data" field') + if 'executionSequence' not in segment_obj: + raise CLIError( + f'Segment file {file_path} must contain ' + f'"executionSequence" field') + + # Build segment with defaults for optional fields + query_data.append({ + 'data': segment_obj['data'], + 'executionSequence': segment_obj['executionSequence'], + 'preConditions': segment_obj.get('preConditions', ''), + 'postFilters': segment_obj.get('postFilters', '') + }) - # Parse execution sequence - try: - exec_seq = [int(x.strip()) for x in execution_sequence.split(',')] - except ValueError: - raise CLIError( - '--execution-sequence must be comma-separated integers ' - '(e.g., "1,1,2")') + else: + # INLINE mode: segments are raw SQL strings + if not execution_sequence: + raise CLIError( + '--execution-sequence is required when using inline SQL ' + 'segments.') - # Validate segment count matches execution sequence count - if len(segments) != len(exec_seq): - raise CLIError( - f'Number of query segments ({len(segments)}) must match ' - f'execution sequence count ({len(exec_seq)})') + # Parse execution sequence + try: + exec_seq = [int(x.strip()) + for x in execution_sequence.split(',')] + except ValueError: + raise CLIError( + '--execution-sequence must be comma-separated integers ' + '(e.g., "1,1,2")') - # Build queryData array - query_data = [] - for sql, seq in zip(segments, exec_seq): - query_data.append({ - 'data': sql, - 'executionSequence': 
seq, - 'preConditions': '', - 'postFilters': '' - }) + # Validate segment count matches execution sequence count + if len(inline_segments) != len(exec_seq): + raise CLIError( + f'Number of query segments ({len(inline_segments)}) must ' + f'match execution sequence count ({len(exec_seq)})') + + # Build queryData array from inline SQL + for sql, seq in zip(inline_segments, exec_seq): + query_data.append({ + 'data': sql, + 'executionSequence': seq, + 'preConditions': '', + 'postFilters': '' + }) # Parse input datasets (comma-separated datasetId:viewName pairs) input_ds_list = [] diff --git a/src/managedcleanroom/azext_managedcleanroom/_help.py b/src/managedcleanroom/azext_managedcleanroom/_help.py index a8423a30cc9..6186b2ea1c3 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_help.py +++ b/src/managedcleanroom/azext_managedcleanroom/_help.py @@ -520,8 +520,10 @@ short-summary: Publish a query to the collaboration long-summary: | Publish a query configuration with SQL segments, execution sequence, and dataset mappings. - Query segments can be loaded from files or provided inline. The execution sequence defines - which segments run in parallel (same number) or sequentially (different numbers). + Query segments can be provided as @file.json (full segment object including executionSequence) + or as inline SQL strings (requires --execution-sequence parameter). The execution sequence + defines which segments run in parallel (same number) or sequentially (different numbers). + Cannot mix @file.json and inline SQL segments in the same command. 
parameters: - name: --collaboration-id -c type: string @@ -545,26 +547,28 @@ type: string short-summary: Output dataset as datasetId:viewName examples: - - name: Publish a query with SQL segments from files + - name: Publish a query with segments from JSON files (segment files include executionSequence) text: | - az managedcleanroom frontend analytics query publish \ - --collaboration-id my-collab-123 \ - --document-id my-query \ - --query-segment @segment1.sql \ - --query-segment @segment2.sql \ - --query-segment @segment3.sql \ - --execution-sequence "1,1,2" \ - --input-datasets "dataset1:view1,dataset2:view2" \ + az managedcleanroom frontend analytics query publish \\ + --collaboration-id my-collab-123 \\ + --document-id my-query \\ + --query-segment @segment1.json \\ + --query-segment @segment2.json \\ + --query-segment @segment3.json \\ + --input-datasets "dataset1:view1,dataset2:view2" \\ --output-dataset "output-dataset:results" - - name: Publish a query with inline SQL + + Note: Each segment JSON file should contain: + data, executionSequence, preConditions, postFilters fields + - name: Publish a query with inline SQL segments (requires --execution-sequence) text: | - az managedcleanroom frontend analytics query publish \ - --collaboration-id my-collab-123 \ - --document-id my-query \ - --query-segment "SELECT * FROM table1" \ - --query-segment "SELECT * FROM table2" \ - --execution-sequence "1,2" \ - --input-datasets "dataset1:view1" \ + az managedcleanroom frontend analytics query publish \\ + --collaboration-id my-collab-123 \\ + --document-id my-query \\ + --query-segment "SELECT * FROM table1" \\ + --query-segment "SELECT * FROM table2" \\ + --execution-sequence "1,2" \\ + --input-datasets "dataset1:view1" \\ --output-dataset "output-dataset:results" - name: Publish a query using a JSON body file (legacy mode) text: | diff --git a/src/managedcleanroom/azext_managedcleanroom/_params.py b/src/managedcleanroom/azext_managedcleanroom/_params.py index 
1cb8dc3f74b..9594acfe9c2 100644 --- a/src/managedcleanroom/azext_managedcleanroom/_params.py +++ b/src/managedcleanroom/azext_managedcleanroom/_params.py @@ -231,13 +231,15 @@ def load_arguments(self, _): # pylint: disable=unused-argument 'query_segment', options_list=['--query-segment'], action='append', - help='Query segment SQL (@file.sql or inline). ' - 'Repeatable. Order matters.') + help='Query segment: @file.json (full segment object with data, ' + 'executionSequence, preConditions, postFilters) or inline SQL string. ' + 'Repeatable. Order matters. Cannot mix @file.json and inline segments.') c.argument( 'execution_sequence', options_list=['--execution-sequence'], help='Comma-separated execution sequence numbers (e.g., "1,1,2"). ' - 'Must match segment count.') + 'Required for inline SQL segments. Not used with @file.json segments ' + '(include executionSequence in each JSON file instead).') c.argument( 'input_datasets', options_list=['--input-datasets'], diff --git a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py index be71e8133f1..8fcb3739935 100644 --- a/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py +++ b/src/managedcleanroom/azext_managedcleanroom/tests/latest/test_frontend_query.py @@ -8,6 +8,7 @@ Tests query CRUD, execution, voting, and run history commands from _frontend_custom.py. 
""" +import json import unittest from unittest.mock import Mock, patch from azext_managedcleanroom._frontend_custom import ( @@ -261,7 +262,7 @@ def test_show_query_run_result(self, mock_get_client): @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') def test_publish_query_with_parameters_from_files(self, mock_get_client): - """Test publishing a query with SQL segments from files""" + """Test publishing a query with SQL segments from JSON files""" # Mock the client mock_client = Mock() mock_client.collaboration.analytics_queries_document_id_publish_post.return_value = { @@ -270,16 +271,31 @@ def test_publish_query_with_parameters_from_files(self, mock_get_client): } mock_get_client.return_value = mock_client - # Mock file reading for SQL segments - sql_segment_1 = "SELECT * FROM table1" - sql_segment_2 = "SELECT * FROM table2" - sql_segment_3 = "SELECT * FROM table3" + # Mock file reading for JSON segment files + segment_1 = { + "data": "SELECT * FROM table1", + "executionSequence": 1, + "preConditions": "", + "postFilters": "" + } + segment_2 = { + "data": "SELECT * FROM table2", + "executionSequence": 1, + "preConditions": "", + "postFilters": "" + } + segment_3 = { + "data": "SELECT * FROM table3", + "executionSequence": 2, + "preConditions": "", + "postFilters": "" + } def mock_open_handler(filename, mode='r'): content = { - 'segment1.sql': sql_segment_1, - 'segment2.sql': sql_segment_2, - 'segment3.sql': sql_segment_3 + 'segment1.json': json.dumps(segment_1), + 'segment2.json': json.dumps(segment_2), + 'segment3.json': json.dumps(segment_3) } file_content = content.get(filename, "") return unittest.mock.mock_open(read_data=file_content)() @@ -291,8 +307,8 @@ def mock_open_handler(filename, mode='r'): collaboration_id="test-collab-123", document_id="test-query-123", body=None, - query_segment=["@segment1.sql", "@segment2.sql", "@segment3.sql"], - execution_sequence="1,1,2", + query_segment=["@segment1.json", "@segment2.json", 
"@segment3.json"], + execution_sequence=None, input_datasets="dataset1:view1,dataset2:view2", output_dataset="output-dataset:results" ) @@ -301,13 +317,13 @@ def mock_open_handler(filename, mode='r'): self.assertEqual(result["queryId"], "test-query-123") self.assertEqual(result["status"], "published") - # Verify body construction + # Verify body construction - segments were parsed from JSON call_args = mock_client.collaboration.analytics_queries_document_id_publish_post.call_args body = call_args[0][2] self.assertEqual(body["inputDatasets"], "dataset1:view1,dataset2:view2") self.assertEqual(body["outputDataset"], "output-dataset:results") self.assertEqual(len(body["queryData"]), 3) - self.assertEqual(body["queryData"][0]["data"], sql_segment_1) + self.assertEqual(body["queryData"][0]["data"], "SELECT * FROM table1") self.assertEqual(body["queryData"][0]["executionSequence"], 1) self.assertEqual(body["queryData"][2]["executionSequence"], 2) @@ -387,6 +403,110 @@ def test_publish_query_segment_count_mismatch(self, mock_get_client): self.assertIn("must match execution sequence count", str(context.exception)) + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_publish_query_file_mode_rejects_exec_seq(self, mock_get_client): + """Test that FILE mode raises error if --execution-sequence is provided""" + from azure.cli.core.util import CLIError + + mock_client = Mock() + mock_get_client.return_value = mock_client + + segment_json = json.dumps({ + "data": "SELECT * FROM table1", + "executionSequence": 1, + "preConditions": "", + "postFilters": "" + }) + + with patch('builtins.open', unittest.mock.mock_open(read_data=segment_json)): + with self.assertRaises(CLIError) as context: + frontend_collaboration_query_publish( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-query-123", + body=None, + query_segment=["@segment1.json"], + execution_sequence="1", # Should raise error + input_datasets="dataset1:view1", + 
output_dataset="output-dataset:results" + ) + + self.assertIn("must not be provided when using @file.json", str(context.exception)) + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_publish_query_inline_mode_requires_exec_seq(self, mock_get_client): + """Test that INLINE mode requires --execution-sequence""" + from azure.cli.core.util import CLIError + + mock_client = Mock() + mock_get_client.return_value = mock_client + + with self.assertRaises(CLIError) as context: + frontend_collaboration_query_publish( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-query-123", + body=None, + query_segment=["SELECT * FROM table1"], + execution_sequence=None, # Should raise error + input_datasets="dataset1:view1", + output_dataset="output-dataset:results" + ) + + self.assertIn("required when using inline SQL", str(context.exception)) + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_publish_query_disallows_mixed_segments(self, mock_get_client): + """Test that mixing @file.json and inline segments raises error""" + from azure.cli.core.util import CLIError + + mock_client = Mock() + mock_get_client.return_value = mock_client + + with self.assertRaises(CLIError) as context: + frontend_collaboration_query_publish( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-query-123", + body=None, + query_segment=["@segment1.json", "SELECT * FROM table2"], # Mixed + execution_sequence="1,2", + input_datasets="dataset1:view1", + output_dataset="output-dataset:results" + ) + + self.assertIn("Cannot mix @file.json and inline SQL", str(context.exception)) + + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') + def test_publish_query_file_missing_execution_sequence(self, mock_get_client): + """Test that segment JSON file must contain executionSequence""" + from azure.cli.core.util import CLIError + + mock_client = Mock() + mock_get_client.return_value = 
mock_client + + # Segment JSON missing executionSequence field + segment_json = json.dumps({ + "data": "SELECT * FROM table1", + "preConditions": "", + "postFilters": "" + }) + + with patch('builtins.open', unittest.mock.mock_open(read_data=segment_json)): + with self.assertRaises(CLIError) as context: + frontend_collaboration_query_publish( + cmd=Mock(), + collaboration_id="test-collab-123", + document_id="test-query-123", + body=None, + query_segment=["@segment1.json"], + execution_sequence=None, + input_datasets="dataset1:view1", + output_dataset="output-dataset:results" + ) + + self.assertIn('must contain "executionSequence"', str(context.exception)) + @patch('azext_managedcleanroom._frontend_custom.get_frontend_client') def test_publish_query_invalid_dataset_format(self, mock_get_client): """Test validation of dataset ID:view format""" From d405bbe83662918ba11010cdcf0f7ec025f87e09 Mon Sep 17 00:00:00 2001 From: Saksham Garg Date: Tue, 24 Mar 2026 12:13:27 +0530 Subject: [PATCH 08/10] Remove extension-specific entries from global .gitignore - Removed .opencode/ and personal helper scripts from global .gitignore - These are extension-specific development files that should not be in the global ignore list - Files will remain untracked locally without affecting the repository Addresses PR review feedback --- .gitignore | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/.gitignore b/.gitignore index 748a8c2d506..7ff6ec7e532 100644 --- a/.gitignore +++ b/.gitignore @@ -127,10 +127,4 @@ _az_debug/ # Ignore test results test_results.xml -.opencode/ -docs/agents.md -scripts/autofix_cleanroom.py -scripts/autofix_managedcleanroom.sh -scripts/validate_managedcleanroom.sh -src/managedcleanroom/cmdlet_testing_status_detailed.csv -src/managedcleanroom/cmdlet_testing_status.xlsx + From 2635328b346335036be6b8ffbff709dd23689c50 Mon Sep 17 00:00:00 2001 From: sakshamgargMS Date: Tue, 24 Mar 2026 12:16:51 +0530 Subject: [PATCH 09/10] Update .gitignore --- 
.gitignore | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitignore b/.gitignore index 7ff6ec7e532..3e9d06ca5e3 100644 --- a/.gitignore +++ b/.gitignore @@ -127,4 +127,3 @@ _az_debug/ # Ignore test results test_results.xml - From 8fbe0c51431574c4bf1f4fe33cc9478b721b66c9 Mon Sep 17 00:00:00 2001 From: Saksham Garg Date: Fri, 27 Mar 2026 11:50:16 +0530 Subject: [PATCH 10/10] update extension version --- src/managedcleanroom/HISTORY.rst | 29 ++++++++++++++--------------- src/managedcleanroom/setup.py | 2 +- 2 files changed, 15 insertions(+), 16 deletions(-) diff --git a/src/managedcleanroom/HISTORY.rst b/src/managedcleanroom/HISTORY.rst index 891d0d9f3c1..4447c820abe 100644 --- a/src/managedcleanroom/HISTORY.rst +++ b/src/managedcleanroom/HISTORY.rst @@ -3,7 +3,20 @@ Release History =============== +1.0.0b1 ++++++++ +* Initial release. + +1.0.0b2 ++++++++ +* Add frontend commandlets +* Add MSAL device code flow authentication + 1.0.0b3 +++++++ +* Update commands to reflect new API version 2026-03-31-preview + +1.0.0b4 +++++++ * Updated to latest Frontend API spec from develop branch (2026-03-01-preview) * Regenerated analytics_frontend_api SDK with updated method signatures @@ -33,18 +46,4 @@ Release History * Updated: Added --active-only filter to collaboration list and show commands * Updated: Added --pending-only filter to invitation list command * Updated: Added --scope, --from-seqno, --to-seqno filters to audit event list command -* Updated: Response structures modernized (many list endpoints now return structured objects with value arrays) -* Remove empty enum value from ApplicationState schema - -1.0.0b1 -+++++++ -* Initial release. 
- -1.0.0b2 -+++++++ -* Add frontend commandlets -* Add MSAL device code flow authentication - -1.0.0b3 -++++++ -* Update commands to reflect new API version 2026-03-31-preview \ No newline at end of file +* Updated: Response structures modernized (many list endpoints now return structured objects with value arrays) \ No newline at end of file diff --git a/src/managedcleanroom/setup.py b/src/managedcleanroom/setup.py index b2fce87ebba..1c7d7a36b43 100644 --- a/src/managedcleanroom/setup.py +++ b/src/managedcleanroom/setup.py @@ -10,7 +10,7 @@ # HISTORY.rst entry. -VERSION = '1.0.0b3' +VERSION = '1.0.0b4' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers