diff --git a/.mock/definition/exportStorage/azureSpi.yml b/.mock/definition/exportStorage/azureSpi.yml new file mode 100644 index 000000000..962f93d34 --- /dev/null +++ b/.mock/definition/exportStorage/azureSpi.yml @@ -0,0 +1,346 @@ +imports: + root: ../__package__.yml +service: + auth: false + base-path: '' + endpoints: + list: + path: /api/storages/export/azure_spi + method: GET + auth: true + docs: >- + Get a list of all Azure export storage connections that were set up with + Service Principal authentication. + source: + openapi: openapi/openapi.yaml + display-name: Get all Azure SPI export storage + request: + name: AzureSpiListRequest + query-parameters: + ordering: + type: optional + docs: Which field to use when ordering the results. + project: + type: optional + docs: Project ID + response: + docs: '' + type: list + examples: + - response: + body: + - account_name: account_name + can_delete_objects: true + client_id: client_id + client_secret: client_secret + container: container + created_at: '2024-01-15T09:30:00Z' + description: description + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + tenant_id: tenant_id + title: title + traceback: traceback + type: type + use_blob_urls: true + user_delegation_key: user_delegation_key + audiences: + - public + create: + path: /api/storages/export/azure_spi + method: POST + auth: true + docs: >- + Create an Azure export storage connection with Service Principal + authentication to store annotations. + source: + openapi: openapi/openapi.yaml + display-name: Create Azure export storage with SPI authentication + request: + body: root.AzureServicePrincipalExportStorageRequest + content-type: application/json + response: + docs: '' + type: root.AzureServicePrincipalExportStorage + examples: + - request: + project: 1 + response: + body: + account_name: account_name + can_delete_objects: true + client_id: client_id + client_secret: client_secret + container: container + created_at: '2024-01-15T09:30:00Z' + description: description + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + tenant_id: tenant_id + title: title + traceback: traceback + type: type + use_blob_urls: true + user_delegation_key: user_delegation_key + audiences: + - public + validate: + path: /api/storages/export/azure_spi/validate + method: POST + auth: true + docs: >- + Validate a specific Azure export storage connection that was set up with + Service Principal authentication. + source: + openapi: openapi/openapi.yaml + display-name: Validate Azure SPI export storage + request: + body: root.AzureServicePrincipalExportStorageRequest + content-type: application/json + examples: + - request: + project: 1 + audiences: + - public + get: + path: /api/storages/export/azure_spi/{id} + method: GET + auth: true + docs: >- + Get a specific Azure export storage connection that was set up with + Service Principal authentication. 
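The /api/storages/export/azure_spi endpoints above follow the usual list/create/validate pattern. Below is a minimal sketch of listing and creating a connection over plain HTTP; it assumes a local instance at http://localhost:8080, token auth of the form "Authorization: Token <key>", and placeholder credential values — the exact required fields are those of AzureServicePrincipalExportStorageRequest.

    import requests

    BASE = "http://localhost:8080"                      # placeholder host
    HEADERS = {"Authorization": "Token YOUR_API_KEY"}   # placeholder token

    # List Azure SPI export storage connections for one project
    listing = requests.get(f"{BASE}/api/storages/export/azure_spi",
                           headers=HEADERS, params={"project": 1})
    listing.raise_for_status()

    # Create a connection; field names mirror the response schema above
    payload = {
        "project": 1,
        "title": "annotations-export",
        "account_name": "<storage-account>",
        "container": "<container>",
        "client_id": "<client-id>",
        "client_secret": "<client-secret>",
        "tenant_id": "<tenant-id>",
    }
    created = requests.post(f"{BASE}/api/storages/export/azure_spi",
                            headers=HEADERS, json=payload)
    created.raise_for_status()
    print(created.json()["id"])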
+ source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Get Azure SPI export storage + response: + docs: '' + type: root.AzureServicePrincipalExportStorage + examples: + - path-parameters: + id: 1 + response: + body: + account_name: account_name + can_delete_objects: true + client_id: client_id + client_secret: client_secret + container: container + created_at: '2024-01-15T09:30:00Z' + description: description + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + tenant_id: tenant_id + title: title + traceback: traceback + type: type + use_blob_urls: true + user_delegation_key: user_delegation_key + audiences: + - public + delete: + path: /api/storages/export/azure_spi/{id} + method: DELETE + auth: true + docs: >- + Delete a specific Azure export storage connection that was set up with + Service Principal authentication. + source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Delete Azure SPI export storage + examples: + - path-parameters: + id: 1 + audiences: + - public + update: + path: /api/storages/export/azure_spi/{id} + method: PATCH + auth: true + docs: >- + Update a specific Azure export storage connection that was set up with + Service Principal authentication. + source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Update Azure SPI export storage + request: + name: PatchedAzureServicePrincipalExportStorageRequest + body: + properties: + account_name: + type: optional + docs: Azure Blob account name + can_delete_objects: + type: optional + docs: Deletion from storage enabled + client_id: + type: optional + docs: Azure Blob Service Principal Client ID + client_secret: + type: optional + docs: Azure Blob Service Principal Client Secret + container: + type: optional + docs: Azure blob container + description: + type: optional + docs: Cloud storage description + last_sync: + type: optional + docs: Last sync finished time + last_sync_count: + type: optional + docs: Count of tasks synced last time + validation: + min: 0 + max: 2147483647 + last_sync_job: + type: optional + docs: Last sync job ID + validation: + maxLength: 256 + meta: optional + prefix: + type: optional + docs: Azure blob prefix name + project: + type: optional + docs: A unique integer value identifying this project. 
+ regex_filter: + type: optional + docs: Cloud storage regex for filtering objects + status: optional + synchronizable: + type: optional + default: true + tenant_id: + type: optional + docs: Azure Tenant ID + title: + type: optional + docs: Cloud storage title + validation: + maxLength: 256 + traceback: + type: optional + docs: Traceback report for the last failed sync + use_blob_urls: + type: optional + docs: Interpret objects as BLOBs and generate URLs + user_delegation_key: + type: optional + docs: User Delegation Key (Backend) + content-type: application/json + response: + docs: '' + type: root.AzureServicePrincipalExportStorage + examples: + - path-parameters: + id: 1 + request: {} + response: + body: + account_name: account_name + can_delete_objects: true + client_id: client_id + client_secret: client_secret + container: container + created_at: '2024-01-15T09:30:00Z' + description: description + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + tenant_id: tenant_id + title: title + traceback: traceback + type: type + use_blob_urls: true + user_delegation_key: user_delegation_key + audiences: + - public + sync: + path: /api/storages/export/azure_spi/{id}/sync + method: POST + auth: true + docs: Sync tasks from an Azure SPI export storage. + source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Sync Azure SPI export storage + response: + docs: '' + type: root.AzureServicePrincipalExportStorage + examples: + - path-parameters: + id: 1 + response: + body: + account_name: account_name + can_delete_objects: true + client_id: client_id + client_secret: client_secret + container: container + created_at: '2024-01-15T09:30:00Z' + description: description + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + tenant_id: tenant_id + title: title + traceback: traceback + type: type + use_blob_urls: true + user_delegation_key: user_delegation_key + audiences: + - public + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/exportStorage/gcswif.yml b/.mock/definition/exportStorage/gcswif.yml new file mode 100644 index 000000000..5cf7e8d1c --- /dev/null +++ b/.mock/definition/exportStorage/gcswif.yml @@ -0,0 +1,354 @@ +imports: + root: ../__package__.yml +service: + auth: false + base-path: '' + endpoints: + list: + path: /api/storages/export/gcswif + method: GET + auth: true + docs: >- + Get a list of all GCS export storage connections that were set up with + WIF authentication. + source: + openapi: openapi/openapi.yaml + display-name: Get all GCS WIF export storage + request: + name: GcswifListRequest + query-parameters: + ordering: + type: optional + docs: Which field to use when ordering the results. 
+ project: + type: optional + docs: Project ID + response: + docs: '' + type: list + examples: + - response: + body: + - bucket: bucket + can_delete_objects: true + created_at: '2024-01-15T09:30:00Z' + description: description + google_application_credentials: google_application_credentials + google_project_id: google_project_id + google_project_number: google_project_number + google_service_account_email: google_service_account_email + google_wif_pool_id: google_wif_pool_id + google_wif_provider_id: google_wif_provider_id + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + title: title + traceback: traceback + type: type + use_blob_urls: true + audiences: + - public + create: + path: /api/storages/export/gcswif + method: POST + auth: true + docs: >- + Create an GCS export storage connection with WIF authentication to store + annotations. + source: + openapi: openapi/openapi.yaml + display-name: Create GCS export storage with WIF authentication + request: + body: root.GcswifExportStorageRequest + content-type: application/json + response: + docs: '' + type: root.GcswifExportStorage + examples: + - request: + project: 1 + response: + body: + bucket: bucket + can_delete_objects: true + created_at: '2024-01-15T09:30:00Z' + description: description + google_application_credentials: google_application_credentials + google_project_id: google_project_id + google_project_number: google_project_number + google_service_account_email: google_service_account_email + google_wif_pool_id: google_wif_pool_id + google_wif_provider_id: google_wif_provider_id + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + title: title + traceback: traceback + type: type + use_blob_urls: true + audiences: + - public + validate: + path: /api/storages/export/gcswif/validate + method: POST + auth: true + docs: >- + Validate a specific GCS export storage connection that was set up with + WIF authentication. + source: + openapi: openapi/openapi.yaml + display-name: Validate GCS WIF export storage + request: + body: root.GcswifExportStorageRequest + content-type: application/json + examples: + - request: + project: 1 + audiences: + - public + get: + path: /api/storages/export/gcswif/{id} + method: GET + auth: true + docs: >- + Get a specific GCS export storage connection that was set up with WIF + authentication. 
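The validate endpoint accepts the same GcswifExportStorageRequest body as create, so a connection can be checked before it is saved. A sketch under the same assumptions (placeholder host, token header, and WIF values):

    import requests

    BASE = "http://localhost:8080"                      # placeholder host
    HEADERS = {"Authorization": "Token YOUR_API_KEY"}   # placeholder token

    payload = {
        "project": 1,
        "bucket": "<gcs-bucket>",
        "google_project_id": "<gcp-project-id>",
        "google_project_number": "<gcp-project-number>",
        "google_wif_pool_id": "<wif-pool-id>",
        "google_wif_provider_id": "<wif-provider-id>",
        "google_service_account_email": "<service-account-email>",
    }

    # Check credentials first, then persist the connection
    requests.post(f"{BASE}/api/storages/export/gcswif/validate",
                  headers=HEADERS, json=payload).raise_for_status()
    storage = requests.post(f"{BASE}/api/storages/export/gcswif",
                            headers=HEADERS, json=payload).json()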
+ source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Get GCS WIF export storage + response: + docs: '' + type: root.GcswifExportStorage + examples: + - path-parameters: + id: 1 + response: + body: + bucket: bucket + can_delete_objects: true + created_at: '2024-01-15T09:30:00Z' + description: description + google_application_credentials: google_application_credentials + google_project_id: google_project_id + google_project_number: google_project_number + google_service_account_email: google_service_account_email + google_wif_pool_id: google_wif_pool_id + google_wif_provider_id: google_wif_provider_id + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + title: title + traceback: traceback + type: type + use_blob_urls: true + audiences: + - public + delete: + path: /api/storages/export/gcswif/{id} + method: DELETE + auth: true + docs: >- + Delete a specific GCS export storage connection that was set up with WIF + authentication. + source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Delete GCS WIF export storage + examples: + - path-parameters: + id: 1 + audiences: + - public + update: + path: /api/storages/export/gcswif/{id} + method: PATCH + auth: true + docs: >- + Update a specific GCS export storage connection that was set up with WIF + authentication. + source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Update GCS WIF export storage + request: + name: PatchedGcswifExportStorageRequest + body: + properties: + bucket: + type: optional + docs: GCS bucket name + can_delete_objects: + type: optional + docs: Deletion from storage enabled + description: + type: optional + docs: Cloud storage description + google_application_credentials: + type: optional + docs: The content of GOOGLE_APPLICATION_CREDENTIALS json file + google_project_id: + type: optional + docs: Google project ID + google_project_number: + type: optional + docs: Google project number + google_service_account_email: + type: optional + docs: Google service account email + google_wif_pool_id: + type: optional + docs: Google WIF pool ID + google_wif_provider_id: + type: optional + docs: Google WIF provider ID + last_sync: + type: optional + docs: Last sync finished time + last_sync_count: + type: optional + docs: Count of tasks synced last time + validation: + min: 0 + max: 2147483647 + last_sync_job: + type: optional + docs: Last sync job ID + validation: + maxLength: 256 + meta: optional + prefix: + type: optional + docs: GCS bucket prefix + project: + type: optional + docs: A unique integer value identifying this project. 
+ regex_filter: + type: optional + docs: Cloud storage regex for filtering objects + status: optional + synchronizable: + type: optional + default: true + title: + type: optional + docs: Cloud storage title + validation: + maxLength: 256 + traceback: + type: optional + docs: Traceback report for the last failed sync + use_blob_urls: + type: optional + docs: Interpret objects as BLOBs and generate URLs + content-type: application/json + response: + docs: '' + type: root.GcswifExportStorage + examples: + - path-parameters: + id: 1 + request: {} + response: + body: + bucket: bucket + can_delete_objects: true + created_at: '2024-01-15T09:30:00Z' + description: description + google_application_credentials: google_application_credentials + google_project_id: google_project_id + google_project_number: google_project_number + google_service_account_email: google_service_account_email + google_wif_pool_id: google_wif_pool_id + google_wif_provider_id: google_wif_provider_id + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + title: title + traceback: traceback + type: type + use_blob_urls: true + audiences: + - public + sync: + path: /api/storages/export/gcswif/{id}/sync + method: POST + auth: true + docs: Sync tasks from an GCS WIF export storage. + source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Sync GCS WIF export storage + response: + docs: '' + type: root.GcswifExportStorage + examples: + - path-parameters: + id: 1 + response: + body: + bucket: bucket + can_delete_objects: true + created_at: '2024-01-15T09:30:00Z' + description: description + google_application_credentials: google_application_credentials + google_project_id: google_project_id + google_project_number: google_project_number + google_service_account_email: google_service_account_email + google_wif_pool_id: google_wif_pool_id + google_wif_provider_id: google_wif_provider_id + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + title: title + traceback: traceback + type: type + use_blob_urls: true + audiences: + - public + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/importStorage/azureSpi.yml b/.mock/definition/importStorage/azureSpi.yml new file mode 100644 index 000000000..f88cad861 --- /dev/null +++ b/.mock/definition/importStorage/azureSpi.yml @@ -0,0 +1,357 @@ +imports: + root: ../__package__.yml +service: + auth: false + base-path: '' + endpoints: + list: + path: /api/storages/azure_spi/ + method: GET + auth: true + docs: >- + Get list of all Azure import storage connections set up with Service + Principal authentication. + source: + openapi: openapi/openapi.yaml + display-name: Get Azure SPI import storage + request: + name: AzureSpiListRequest + query-parameters: + ordering: + type: optional + docs: Which field to use when ordering the results. 
+ project: + type: optional + docs: Project ID + response: + docs: '' + type: list + examples: + - response: + body: + - account_name: account_name + client_id: client_id + client_secret: client_secret + container: container + created_at: '2024-01-15T09:30:00Z' + description: description + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + presign: true + presign_ttl: 1 + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + tenant_id: tenant_id + title: title + traceback: traceback + type: type + use_blob_urls: true + user_delegation_key: user_delegation_key + audiences: + - public + create: + path: /api/storages/azure_spi/ + method: POST + auth: true + docs: Create Azure import storage with Service Principal authentication. + source: + openapi: openapi/openapi.yaml + display-name: Create Azure import storage with SPI + request: + body: root.AzureServicePrincipalImportStorageRequest + content-type: application/json + response: + docs: '' + type: root.AzureServicePrincipalImportStorage + examples: + - request: + project: 1 + response: + body: + account_name: account_name + client_id: client_id + client_secret: client_secret + container: container + created_at: '2024-01-15T09:30:00Z' + description: description + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + presign: true + presign_ttl: 1 + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + tenant_id: tenant_id + title: title + traceback: traceback + type: type + use_blob_urls: true + user_delegation_key: user_delegation_key + audiences: + - public + validate: + path: /api/storages/azure_spi/validate + method: POST + auth: true + docs: >- + Validate a specific Azure import storage connection that was set up with + Service Principal authentication. + source: + openapi: openapi/openapi.yaml + display-name: Validate Azure SPI import storage + request: + body: root.AzureServicePrincipalImportStorageRequest + content-type: application/json + examples: + - request: + project: 1 + audiences: + - public + get: + path: /api/storages/azure_spi/{id} + method: GET + auth: true + docs: >- + Get a specific Azure import storage connection that was set up with + Service Principal authentication. + source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Get Azure SPI import storage + response: + docs: '' + type: root.AzureServicePrincipalImportStorage + examples: + - path-parameters: + id: 1 + response: + body: + account_name: account_name + client_id: client_id + client_secret: client_secret + container: container + created_at: '2024-01-15T09:30:00Z' + description: description + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + presign: true + presign_ttl: 1 + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + tenant_id: tenant_id + title: title + traceback: traceback + type: type + use_blob_urls: true + user_delegation_key: user_delegation_key + audiences: + - public + delete: + path: /api/storages/azure_spi/{id} + method: DELETE + auth: true + docs: >- + Delete a specific Azure import storage connection that was set up with + Service Principal authentication. 
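Item-level operations address a single connection by id; note the collection path carries a trailing slash (/api/storages/azure_spi/) while the {id} paths do not. A sketch of fetching and deleting one connection, same placeholder host and token as above:

    import requests

    BASE = "http://localhost:8080"                      # placeholder host
    HEADERS = {"Authorization": "Token YOUR_API_KEY"}   # placeholder token
    storage_id = 1                                      # id returned at creation time

    detail = requests.get(f"{BASE}/api/storages/azure_spi/{storage_id}", headers=HEADERS)
    detail.raise_for_status()
    print(detail.json()["status"])                      # e.g. "initialized"

    # DELETE returns no response body on success
    requests.delete(f"{BASE}/api/storages/azure_spi/{storage_id}",
                    headers=HEADERS).raise_for_status()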
+ source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Delete Azure SPI import storage + examples: + - path-parameters: + id: 1 + audiences: + - public + update: + path: /api/storages/azure_spi/{id} + method: PATCH + auth: true + docs: >- + Update a specific Azure import storage connection that was set up with + Service Principal authentication. + source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Update Azure SPI import storage + request: + name: PatchedAzureServicePrincipalImportStorageRequest + body: + properties: + account_name: + type: optional + docs: Azure Blob account name + client_id: + type: optional + docs: Azure Blob Service Principal Client ID + client_secret: + type: optional + docs: Azure Blob Service Principal Client Secret + container: + type: optional + docs: Azure blob container + description: + type: optional + docs: Cloud storage description + last_sync: + type: optional + docs: Last sync finished time + last_sync_count: + type: optional + docs: Count of tasks synced last time + validation: + min: 0 + max: 2147483647 + last_sync_job: + type: optional + docs: Last sync job ID + validation: + maxLength: 256 + meta: optional + prefix: + type: optional + docs: Azure blob prefix name + presign: + type: optional + default: true + presign_ttl: + type: optional + docs: Presigned URLs TTL (in minutes) + validation: + min: 0 + max: 32767 + project: + type: optional + docs: A unique integer value identifying this project. + regex_filter: + type: optional + docs: Cloud storage regex for filtering objects + status: optional + synchronizable: + type: optional + default: true + tenant_id: + type: optional + docs: Azure Tenant ID + title: + type: optional + docs: Cloud storage title + validation: + maxLength: 256 + traceback: + type: optional + docs: Traceback report for the last failed sync + use_blob_urls: + type: optional + docs: Interpret objects as BLOBs and generate URLs + user_delegation_key: + type: optional + docs: User Delegation Key (Backend) + content-type: application/json + response: + docs: '' + type: root.AzureServicePrincipalImportStorage + examples: + - path-parameters: + id: 1 + request: {} + response: + body: + account_name: account_name + client_id: client_id + client_secret: client_secret + container: container + created_at: '2024-01-15T09:30:00Z' + description: description + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + presign: true + presign_ttl: 1 + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + tenant_id: tenant_id + title: title + traceback: traceback + type: type + use_blob_urls: true + user_delegation_key: user_delegation_key + audiences: + - public + sync: + path: /api/storages/azure_spi/{id}/sync + method: POST + auth: true + docs: >- + Sync tasks from an Azure import storage connection that was set up with + Service Principal authentication. 
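Unlike the removed storageAzureSpi.yml definition, the sync endpoint here takes no request body; the response is the storage record itself, so status, last_sync, and last_sync_count can be read back directly (or polled via GET). A sketch, same assumptions as above:

    import requests

    BASE = "http://localhost:8080"                      # placeholder host
    HEADERS = {"Authorization": "Token YOUR_API_KEY"}   # placeholder token
    storage_id = 1

    sync = requests.post(f"{BASE}/api/storages/azure_spi/{storage_id}/sync", headers=HEADERS)
    sync.raise_for_status()
    record = sync.json()
    print(record["status"], record.get("last_sync"), record.get("last_sync_count"))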
+ source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Sync Azure SPI import storage + response: + docs: '' + type: root.AzureServicePrincipalImportStorage + examples: + - path-parameters: + id: 1 + response: + body: + account_name: account_name + client_id: client_id + client_secret: client_secret + container: container + created_at: '2024-01-15T09:30:00Z' + description: description + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + presign: true + presign_ttl: 1 + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + tenant_id: tenant_id + title: title + traceback: traceback + type: type + use_blob_urls: true + user_delegation_key: user_delegation_key + audiences: + - public + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/importStorage/gcswif.yml b/.mock/definition/importStorage/gcswif.yml new file mode 100644 index 000000000..39e1762b3 --- /dev/null +++ b/.mock/definition/importStorage/gcswif.yml @@ -0,0 +1,363 @@ +imports: + root: ../__package__.yml +service: + auth: false + base-path: '' + endpoints: + list: + path: /api/storages/gcswif/ + method: GET + auth: true + docs: >- + Get list of all GCS import storage connections set up with WIF + authentication. + source: + openapi: openapi/openapi.yaml + display-name: Get GCS WIF import storage + request: + name: GcswifListRequest + query-parameters: + ordering: + type: optional + docs: Which field to use when ordering the results. + project: + type: optional + docs: Project ID + response: + docs: '' + type: list + examples: + - response: + body: + - bucket: bucket + created_at: '2024-01-15T09:30:00Z' + description: description + google_application_credentials: google_application_credentials + google_project_id: google_project_id + google_project_number: google_project_number + google_service_account_email: google_service_account_email + google_wif_pool_id: google_wif_pool_id + google_wif_provider_id: google_wif_provider_id + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + presign: true + presign_ttl: 1 + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + title: title + traceback: traceback + type: type + use_blob_urls: true + audiences: + - public + create: + path: /api/storages/gcswif/ + method: POST + auth: true + docs: Create GCS import storage with WIF. 
+ source: + openapi: openapi/openapi.yaml + display-name: Create GCS import storage with WIF + request: + body: root.GcswifImportStorageRequest + content-type: application/json + response: + docs: '' + type: root.GcswifImportStorage + examples: + - request: + project: 1 + response: + body: + bucket: bucket + created_at: '2024-01-15T09:30:00Z' + description: description + google_application_credentials: google_application_credentials + google_project_id: google_project_id + google_project_number: google_project_number + google_service_account_email: google_service_account_email + google_wif_pool_id: google_wif_pool_id + google_wif_provider_id: google_wif_provider_id + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + presign: true + presign_ttl: 1 + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + title: title + traceback: traceback + type: type + use_blob_urls: true + audiences: + - public + validate: + path: /api/storages/gcswif/validate + method: POST + auth: true + docs: >- + Validate a specific GCS import storage connection that was set up with + WIF authentication. + source: + openapi: openapi/openapi.yaml + display-name: Validate GCS WIF import storage + request: + body: root.GcswifImportStorageRequest + content-type: application/json + examples: + - request: + project: 1 + audiences: + - public + get: + path: /api/storages/gcswif/{id} + method: GET + auth: true + docs: Get a specific GCS import storage connection that was set up with WIF. + source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Get GCS WIF import storage + response: + docs: '' + type: root.GcswifImportStorage + examples: + - path-parameters: + id: 1 + response: + body: + bucket: bucket + created_at: '2024-01-15T09:30:00Z' + description: description + google_application_credentials: google_application_credentials + google_project_id: google_project_id + google_project_number: google_project_number + google_service_account_email: google_service_account_email + google_wif_pool_id: google_wif_pool_id + google_wif_provider_id: google_wif_provider_id + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + presign: true + presign_ttl: 1 + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + title: title + traceback: traceback + type: type + use_blob_urls: true + audiences: + - public + delete: + path: /api/storages/gcswif/{id} + method: DELETE + auth: true + docs: >- + Delete a specific GCS import storage connection that was set up with WIF + authentication. + source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Delete GCS WIF import storage + examples: + - path-parameters: + id: 1 + audiences: + - public + update: + path: /api/storages/gcswif/{id} + method: PATCH + auth: true + docs: >- + Update a specific GCS import storage connection that was set up with WIF + authentication. 
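Every field of the PatchedGcswifImportStorageRequest body below is optional, so a PATCH only needs to carry the fields being changed. A sketch of narrowing the synced objects on an existing connection, same placeholder host and token as above:

    import requests

    BASE = "http://localhost:8080"                      # placeholder host
    HEADERS = {"Authorization": "Token YOUR_API_KEY"}   # placeholder token
    storage_id = 1

    patch = {
        "regex_filter": r".*\.json$",   # only import objects matching this pattern
        "presign_ttl": 15,              # presigned URL TTL, in minutes
        "use_blob_urls": True,
    }
    updated = requests.patch(f"{BASE}/api/storages/gcswif/{storage_id}",
                             headers=HEADERS, json=patch)
    updated.raise_for_status()
    print(updated.json()["regex_filter"])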
+ source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Update GCS WIF import storage + request: + name: PatchedGcswifImportStorageRequest + body: + properties: + bucket: + type: optional + docs: GCS bucket name + description: + type: optional + docs: Cloud storage description + google_application_credentials: + type: optional + docs: The content of GOOGLE_APPLICATION_CREDENTIALS json file + google_project_id: + type: optional + docs: Google project ID + google_project_number: + type: optional + docs: Google project number + google_service_account_email: + type: optional + docs: Google service account email + google_wif_pool_id: + type: optional + docs: Google WIF pool ID + google_wif_provider_id: + type: optional + docs: Google WIF provider ID + last_sync: + type: optional + docs: Last sync finished time + last_sync_count: + type: optional + docs: Count of tasks synced last time + validation: + min: 0 + max: 2147483647 + last_sync_job: + type: optional + docs: Last sync job ID + validation: + maxLength: 256 + meta: optional + prefix: + type: optional + docs: GCS bucket prefix + presign: + type: optional + default: true + presign_ttl: + type: optional + docs: Presigned URLs TTL (in minutes) + validation: + min: 0 + max: 32767 + project: + type: optional + docs: A unique integer value identifying this project. + regex_filter: + type: optional + docs: Cloud storage regex for filtering objects + status: optional + synchronizable: + type: optional + default: true + title: + type: optional + docs: Cloud storage title + validation: + maxLength: 256 + traceback: + type: optional + docs: Traceback report for the last failed sync + use_blob_urls: + type: optional + docs: Interpret objects as BLOBs and generate URLs + content-type: application/json + response: + docs: '' + type: root.GcswifImportStorage + examples: + - path-parameters: + id: 1 + request: {} + response: + body: + bucket: bucket + created_at: '2024-01-15T09:30:00Z' + description: description + google_application_credentials: google_application_credentials + google_project_id: google_project_id + google_project_number: google_project_number + google_service_account_email: google_service_account_email + google_wif_pool_id: google_wif_pool_id + google_wif_provider_id: google_wif_provider_id + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + presign: true + presign_ttl: 1 + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + title: title + traceback: traceback + type: type + use_blob_urls: true + audiences: + - public + sync: + path: /api/storages/gcswif/{id}/sync + method: POST + auth: true + docs: >- + Sync tasks from an GCS import storage connection that was set up with + WIF authentication. 
+ source: + openapi: openapi/openapi.yaml + path-parameters: + id: integer + display-name: Sync GCS WIF import storage + response: + docs: '' + type: root.GcswifImportStorage + examples: + - path-parameters: + id: 1 + response: + body: + bucket: bucket + created_at: '2024-01-15T09:30:00Z' + description: description + google_application_credentials: google_application_credentials + google_project_id: google_project_id + google_project_number: google_project_number + google_service_account_email: google_service_account_email + google_wif_pool_id: google_wif_pool_id + google_wif_provider_id: google_wif_provider_id + id: 1 + last_sync: '2024-01-15T09:30:00Z' + last_sync_count: 1 + last_sync_job: last_sync_job + meta: + key: value + prefix: prefix + presign: true + presign_ttl: 1 + project: 1 + regex_filter: regex_filter + status: initialized + synchronizable: true + title: title + traceback: traceback + type: type + use_blob_urls: true + audiences: + - public + source: + openapi: openapi/openapi.yaml diff --git a/.mock/definition/storageAzureSpi.yml b/.mock/definition/storageAzureSpi.yml deleted file mode 100644 index 452cda177..000000000 --- a/.mock/definition/storageAzureSpi.yml +++ /dev/null @@ -1,730 +0,0 @@ -imports: - root: __package__.yml -service: - auth: false - base-path: '' - endpoints: - api_storages_azure_spi_list: - path: /api/storages/azure_spi/ - method: GET - auth: true - docs: >- - Get list of all Azure import storage connections set up with Service - Principal authentication. - source: - openapi: openapi/openapi.yaml - display-name: Get Azure SPI import storage - request: - name: ApiStoragesAzureSpiListRequest - query-parameters: - ordering: - type: optional - docs: Which field to use when ordering the results. - response: - docs: '' - type: list - examples: - - response: - body: - - account_name: account_name - client_id: client_id - client_secret: client_secret - container: container - created_at: '2024-01-15T09:30:00Z' - description: description - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - presign: true - presign_ttl: 1 - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - tenant_id: tenant_id - title: title - traceback: traceback - type: type - use_blob_urls: true - user_delegation_key: user_delegation_key - api_storages_azure_spi_create: - path: /api/storages/azure_spi/ - method: POST - auth: true - docs: Create Azure import storage with Service Principal authentication. 
- source: - openapi: openapi/openapi.yaml - display-name: Create Azure import storage with SPI - request: - body: root.AzureServicePrincipalImportStorageRequest - content-type: application/json - response: - docs: '' - type: root.AzureServicePrincipalImportStorage - examples: - - request: - project: 1 - response: - body: - account_name: account_name - client_id: client_id - client_secret: client_secret - container: container - created_at: '2024-01-15T09:30:00Z' - description: description - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - presign: true - presign_ttl: 1 - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - tenant_id: tenant_id - title: title - traceback: traceback - type: type - use_blob_urls: true - user_delegation_key: user_delegation_key - api_storages_azure_spi_validate_create: - path: /api/storages/azure_spi/validate - method: POST - auth: true - docs: >- - Validate a specific Azure import storage connection that was set up with - Service Principal authentication. - source: - openapi: openapi/openapi.yaml - display-name: Validate Azure SPI import storage - request: - body: root.AzureServicePrincipalImportStorageRequest - content-type: application/json - response: - docs: '' - type: root.AzureServicePrincipalImportStorage - examples: - - request: - project: 1 - response: - body: - account_name: account_name - client_id: client_id - client_secret: client_secret - container: container - created_at: '2024-01-15T09:30:00Z' - description: description - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - presign: true - presign_ttl: 1 - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - tenant_id: tenant_id - title: title - traceback: traceback - type: type - use_blob_urls: true - user_delegation_key: user_delegation_key - api_storages_azure_spi_retrieve: - path: /api/storages/azure_spi/{id} - method: GET - auth: true - docs: >- - Get a specific Azure import storage connection that was set up with - Service Principal authentication. - source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Get Azure SPI import storage - response: - docs: '' - type: root.AzureServicePrincipalImportStorage - examples: - - path-parameters: - id: 1 - response: - body: - account_name: account_name - client_id: client_id - client_secret: client_secret - container: container - created_at: '2024-01-15T09:30:00Z' - description: description - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - presign: true - presign_ttl: 1 - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - tenant_id: tenant_id - title: title - traceback: traceback - type: type - use_blob_urls: true - user_delegation_key: user_delegation_key - api_storages_azure_spi_destroy: - path: /api/storages/azure_spi/{id} - method: DELETE - auth: true - docs: >- - Delete a specific Azure import storage connection that was set up with - Service Principal authentication. 
- source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Delete Azure SPI import storage - examples: - - path-parameters: - id: 1 - api_storages_azure_spi_partial_update: - path: /api/storages/azure_spi/{id} - method: PATCH - auth: true - docs: >- - Update a specific Azure import storage connection that was set up with - Service Principal authentication. - source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Update Azure SPI import storage - request: - name: PatchedAzureServicePrincipalImportStorageRequest - body: - properties: - account_name: - type: optional - docs: Azure Blob account name - client_id: - type: optional - docs: Azure Blob Service Principal Client ID - client_secret: - type: optional - docs: Azure Blob Service Principal Client Secret - container: - type: optional - docs: Azure blob container - description: - type: optional - docs: Cloud storage description - last_sync: - type: optional - docs: Last sync finished time - last_sync_count: - type: optional - docs: Count of tasks synced last time - validation: - min: 0 - max: 2147483647 - last_sync_job: - type: optional - docs: Last sync job ID - validation: - maxLength: 256 - meta: optional - prefix: - type: optional - docs: Azure blob prefix name - presign: - type: optional - default: true - presign_ttl: - type: optional - docs: Presigned URLs TTL (in minutes) - validation: - min: 0 - max: 32767 - project: - type: optional - docs: A unique integer value identifying this project. - regex_filter: - type: optional - docs: Cloud storage regex for filtering objects - status: optional - synchronizable: - type: optional - default: true - tenant_id: - type: optional - docs: Azure Tenant ID - title: - type: optional - docs: Cloud storage title - validation: - maxLength: 256 - traceback: - type: optional - docs: Traceback report for the last failed sync - use_blob_urls: - type: optional - docs: Interpret objects as BLOBs and generate URLs - user_delegation_key: - type: optional - docs: User Delegation Key (Backend) - content-type: application/json - response: - docs: '' - type: root.AzureServicePrincipalImportStorage - examples: - - path-parameters: - id: 1 - request: {} - response: - body: - account_name: account_name - client_id: client_id - client_secret: client_secret - container: container - created_at: '2024-01-15T09:30:00Z' - description: description - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - presign: true - presign_ttl: 1 - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - tenant_id: tenant_id - title: title - traceback: traceback - type: type - use_blob_urls: true - user_delegation_key: user_delegation_key - api_storages_azure_spi_sync_create: - path: /api/storages/azure_spi/{id}/sync - method: POST - auth: true - docs: >- - Sync tasks from an Azure import storage connection that was set up with - Service Principal authentication. 
- source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Sync Azure SPI import storage - request: - body: root.AzureServicePrincipalImportStorageRequest - content-type: application/json - response: - docs: '' - type: root.AzureServicePrincipalImportStorage - examples: - - path-parameters: - id: 1 - request: - project: 1 - response: - body: - account_name: account_name - client_id: client_id - client_secret: client_secret - container: container - created_at: '2024-01-15T09:30:00Z' - description: description - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - presign: true - presign_ttl: 1 - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - tenant_id: tenant_id - title: title - traceback: traceback - type: type - use_blob_urls: true - user_delegation_key: user_delegation_key - api_storages_export_azure_spi_list: - path: /api/storages/export/azure_spi - method: GET - auth: true - docs: >- - Get a list of all Azure export storage connections that were set up with - Service Principal authentication. - source: - openapi: openapi/openapi.yaml - display-name: Get all Azure SPI export storage - request: - name: ApiStoragesExportAzureSpiListRequest - query-parameters: - ordering: - type: optional - docs: Which field to use when ordering the results. - response: - docs: '' - type: list - examples: - - response: - body: - - account_name: account_name - can_delete_objects: true - client_id: client_id - client_secret: client_secret - container: container - created_at: '2024-01-15T09:30:00Z' - description: description - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - tenant_id: tenant_id - title: title - traceback: traceback - type: type - use_blob_urls: true - user_delegation_key: user_delegation_key - api_storages_export_azure_spi_create: - path: /api/storages/export/azure_spi - method: POST - auth: true - docs: >- - Create an Azure export storage connection with Service Principal - authentication to store annotations. - source: - openapi: openapi/openapi.yaml - display-name: Create Azure export storage with SPI authentication - request: - body: root.AzureServicePrincipalExportStorageRequest - content-type: application/json - response: - docs: '' - type: root.AzureServicePrincipalExportStorage - examples: - - request: - project: 1 - response: - body: - account_name: account_name - can_delete_objects: true - client_id: client_id - client_secret: client_secret - container: container - created_at: '2024-01-15T09:30:00Z' - description: description - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - tenant_id: tenant_id - title: title - traceback: traceback - type: type - use_blob_urls: true - user_delegation_key: user_delegation_key - api_storages_export_azure_spi_validate_create: - path: /api/storages/export/azure_spi/validate - method: POST - auth: true - docs: >- - Validate a specific Azure export storage connection that was set up with - Service Principal authentication. 
- source: - openapi: openapi/openapi.yaml - display-name: Validate Azure SPI export storage - request: - body: root.AzureServicePrincipalExportStorageRequest - content-type: application/json - response: - docs: '' - type: root.AzureServicePrincipalExportStorage - examples: - - request: - project: 1 - response: - body: - account_name: account_name - can_delete_objects: true - client_id: client_id - client_secret: client_secret - container: container - created_at: '2024-01-15T09:30:00Z' - description: description - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - tenant_id: tenant_id - title: title - traceback: traceback - type: type - use_blob_urls: true - user_delegation_key: user_delegation_key - api_storages_export_azure_spi_retrieve: - path: /api/storages/export/azure_spi/{id} - method: GET - auth: true - docs: >- - Get a specific Azure export storage connection that was set up with - Service Principal authentication. - source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Get Azure SPI export storage - response: - docs: '' - type: root.AzureServicePrincipalExportStorage - examples: - - path-parameters: - id: 1 - response: - body: - account_name: account_name - can_delete_objects: true - client_id: client_id - client_secret: client_secret - container: container - created_at: '2024-01-15T09:30:00Z' - description: description - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - tenant_id: tenant_id - title: title - traceback: traceback - type: type - use_blob_urls: true - user_delegation_key: user_delegation_key - api_storages_export_azure_spi_destroy: - path: /api/storages/export/azure_spi/{id} - method: DELETE - auth: true - docs: >- - Delete a specific Azure export storage connection that was set up with - Service Principal authentication. - source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Delete Azure SPI export storage - examples: - - path-parameters: - id: 1 - api_storages_export_azure_spi_partial_update: - path: /api/storages/export/azure_spi/{id} - method: PATCH - auth: true - docs: >- - Update a specific Azure export storage connection that was set up with - Service Principal authentication. 
- source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Update Azure SPI export storage - request: - name: PatchedAzureServicePrincipalExportStorageRequest - body: - properties: - account_name: - type: optional - docs: Azure Blob account name - can_delete_objects: - type: optional - docs: Deletion from storage enabled - client_id: - type: optional - docs: Azure Blob Service Principal Client ID - client_secret: - type: optional - docs: Azure Blob Service Principal Client Secret - container: - type: optional - docs: Azure blob container - description: - type: optional - docs: Cloud storage description - last_sync: - type: optional - docs: Last sync finished time - last_sync_count: - type: optional - docs: Count of tasks synced last time - validation: - min: 0 - max: 2147483647 - last_sync_job: - type: optional - docs: Last sync job ID - validation: - maxLength: 256 - meta: optional - prefix: - type: optional - docs: Azure blob prefix name - project: - type: optional - docs: A unique integer value identifying this project. - regex_filter: - type: optional - docs: Cloud storage regex for filtering objects - status: optional - synchronizable: - type: optional - default: true - tenant_id: - type: optional - docs: Azure Tenant ID - title: - type: optional - docs: Cloud storage title - validation: - maxLength: 256 - traceback: - type: optional - docs: Traceback report for the last failed sync - use_blob_urls: - type: optional - docs: Interpret objects as BLOBs and generate URLs - user_delegation_key: - type: optional - docs: User Delegation Key (Backend) - content-type: application/json - response: - docs: '' - type: root.AzureServicePrincipalExportStorage - examples: - - path-parameters: - id: 1 - request: {} - response: - body: - account_name: account_name - can_delete_objects: true - client_id: client_id - client_secret: client_secret - container: container - created_at: '2024-01-15T09:30:00Z' - description: description - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - tenant_id: tenant_id - title: title - traceback: traceback - type: type - use_blob_urls: true - user_delegation_key: user_delegation_key - api_storages_export_azure_spi_sync_create: - path: /api/storages/export/azure_spi/{id}/sync - method: POST - auth: true - docs: Sync tasks from an Azure SPI export storage. 
- source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Sync Azure SPI export storage - request: - body: root.AzureServicePrincipalExportStorageRequest - content-type: application/json - response: - docs: '' - type: root.AzureServicePrincipalExportStorage - examples: - - path-parameters: - id: 1 - request: - project: 1 - response: - body: - account_name: account_name - can_delete_objects: true - client_id: client_id - client_secret: client_secret - container: container - created_at: '2024-01-15T09:30:00Z' - description: description - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - tenant_id: tenant_id - title: title - traceback: traceback - type: type - use_blob_urls: true - user_delegation_key: user_delegation_key - source: - openapi: openapi/openapi.yaml diff --git a/.mock/definition/storageGcsWif.yml b/.mock/definition/storageGcsWif.yml deleted file mode 100644 index 4505b555e..000000000 --- a/.mock/definition/storageGcsWif.yml +++ /dev/null @@ -1,746 +0,0 @@ -imports: - root: __package__.yml -service: - auth: false - base-path: '' - endpoints: - api_storages_export_gcswif_list: - path: /api/storages/export/gcswif - method: GET - auth: true - docs: >- - Get a list of all GCS export storage connections that were set up with - WIF authentication. - source: - openapi: openapi/openapi.yaml - display-name: Get all GCS WIF export storage - request: - name: ApiStoragesExportGcswifListRequest - query-parameters: - ordering: - type: optional - docs: Which field to use when ordering the results. - response: - docs: '' - type: list - examples: - - response: - body: - - bucket: bucket - can_delete_objects: true - created_at: '2024-01-15T09:30:00Z' - description: description - google_application_credentials: google_application_credentials - google_project_id: google_project_id - google_project_number: google_project_number - google_service_account_email: google_service_account_email - google_wif_pool_id: google_wif_pool_id - google_wif_provider_id: google_wif_provider_id - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - title: title - traceback: traceback - type: type - use_blob_urls: true - api_storages_export_gcswif_create: - path: /api/storages/export/gcswif - method: POST - auth: true - docs: >- - Create an GCS export storage connection with WIF authentication to store - annotations. 
- source: - openapi: openapi/openapi.yaml - display-name: Create GCS export storage with WIF authentication - request: - body: root.GcswifExportStorageRequest - content-type: application/json - response: - docs: '' - type: root.GcswifExportStorage - examples: - - request: - project: 1 - response: - body: - bucket: bucket - can_delete_objects: true - created_at: '2024-01-15T09:30:00Z' - description: description - google_application_credentials: google_application_credentials - google_project_id: google_project_id - google_project_number: google_project_number - google_service_account_email: google_service_account_email - google_wif_pool_id: google_wif_pool_id - google_wif_provider_id: google_wif_provider_id - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - title: title - traceback: traceback - type: type - use_blob_urls: true - api_storages_export_gcswif_validate_create: - path: /api/storages/export/gcswif/validate - method: POST - auth: true - docs: >- - Validate a specific GCS export storage connection that was set up with - WIF authentication. - source: - openapi: openapi/openapi.yaml - display-name: Validate GCS WIF export storage - request: - body: root.GcswifExportStorageRequest - content-type: application/json - response: - docs: '' - type: root.GcswifExportStorage - examples: - - request: - project: 1 - response: - body: - bucket: bucket - can_delete_objects: true - created_at: '2024-01-15T09:30:00Z' - description: description - google_application_credentials: google_application_credentials - google_project_id: google_project_id - google_project_number: google_project_number - google_service_account_email: google_service_account_email - google_wif_pool_id: google_wif_pool_id - google_wif_provider_id: google_wif_provider_id - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - title: title - traceback: traceback - type: type - use_blob_urls: true - api_storages_export_gcswif_retrieve: - path: /api/storages/export/gcswif/{id} - method: GET - auth: true - docs: >- - Get a specific GCS export storage connection that was set up with WIF - authentication. 
- source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Get GCS WIF export storage - response: - docs: '' - type: root.GcswifExportStorage - examples: - - path-parameters: - id: 1 - response: - body: - bucket: bucket - can_delete_objects: true - created_at: '2024-01-15T09:30:00Z' - description: description - google_application_credentials: google_application_credentials - google_project_id: google_project_id - google_project_number: google_project_number - google_service_account_email: google_service_account_email - google_wif_pool_id: google_wif_pool_id - google_wif_provider_id: google_wif_provider_id - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - title: title - traceback: traceback - type: type - use_blob_urls: true - api_storages_export_gcswif_destroy: - path: /api/storages/export/gcswif/{id} - method: DELETE - auth: true - docs: >- - Delete a specific GCS export storage connection that was set up with WIF - authentication. - source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Delete GCS WIF export storage - examples: - - path-parameters: - id: 1 - api_storages_export_gcswif_partial_update: - path: /api/storages/export/gcswif/{id} - method: PATCH - auth: true - docs: >- - Update a specific GCS export storage connection that was set up with WIF - authentication. - source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Update GCS WIF export storage - request: - name: PatchedGcswifExportStorageRequest - body: - properties: - bucket: - type: optional - docs: GCS bucket name - can_delete_objects: - type: optional - docs: Deletion from storage enabled - description: - type: optional - docs: Cloud storage description - google_application_credentials: - type: optional - docs: The content of GOOGLE_APPLICATION_CREDENTIALS json file - google_project_id: - type: optional - docs: Google project ID - google_project_number: - type: optional - docs: Google project number - google_service_account_email: - type: optional - docs: Google service account email - google_wif_pool_id: - type: optional - docs: Google WIF pool ID - google_wif_provider_id: - type: optional - docs: Google WIF provider ID - last_sync: - type: optional - docs: Last sync finished time - last_sync_count: - type: optional - docs: Count of tasks synced last time - validation: - min: 0 - max: 2147483647 - last_sync_job: - type: optional - docs: Last sync job ID - validation: - maxLength: 256 - meta: optional - prefix: - type: optional - docs: GCS bucket prefix - project: - type: optional - docs: A unique integer value identifying this project. 
- regex_filter: - type: optional - docs: Cloud storage regex for filtering objects - status: optional - synchronizable: - type: optional - default: true - title: - type: optional - docs: Cloud storage title - validation: - maxLength: 256 - traceback: - type: optional - docs: Traceback report for the last failed sync - use_blob_urls: - type: optional - docs: Interpret objects as BLOBs and generate URLs - content-type: application/json - response: - docs: '' - type: root.GcswifExportStorage - examples: - - path-parameters: - id: 1 - request: {} - response: - body: - bucket: bucket - can_delete_objects: true - created_at: '2024-01-15T09:30:00Z' - description: description - google_application_credentials: google_application_credentials - google_project_id: google_project_id - google_project_number: google_project_number - google_service_account_email: google_service_account_email - google_wif_pool_id: google_wif_pool_id - google_wif_provider_id: google_wif_provider_id - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - title: title - traceback: traceback - type: type - use_blob_urls: true - api_storages_export_gcswif_sync_create: - path: /api/storages/export/gcswif/{id}/sync - method: POST - auth: true - docs: Sync tasks from an GCS WIF export storage. - source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Sync GCS WIF export storage - request: - body: root.GcswifExportStorageRequest - content-type: application/json - response: - docs: '' - type: root.GcswifExportStorage - examples: - - path-parameters: - id: 1 - request: - project: 1 - response: - body: - bucket: bucket - can_delete_objects: true - created_at: '2024-01-15T09:30:00Z' - description: description - google_application_credentials: google_application_credentials - google_project_id: google_project_id - google_project_number: google_project_number - google_service_account_email: google_service_account_email - google_wif_pool_id: google_wif_pool_id - google_wif_provider_id: google_wif_provider_id - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - title: title - traceback: traceback - type: type - use_blob_urls: true - api_storages_gcswif_list: - path: /api/storages/gcswif/ - method: GET - auth: true - docs: >- - Get list of all GCS import storage connections set up with WIF - authentication. - source: - openapi: openapi/openapi.yaml - display-name: Get GCS WIF import storage - request: - name: ApiStoragesGcswifListRequest - query-parameters: - ordering: - type: optional - docs: Which field to use when ordering the results. 
- response: - docs: '' - type: list - examples: - - response: - body: - - bucket: bucket - created_at: '2024-01-15T09:30:00Z' - description: description - google_application_credentials: google_application_credentials - google_project_id: google_project_id - google_project_number: google_project_number - google_service_account_email: google_service_account_email - google_wif_pool_id: google_wif_pool_id - google_wif_provider_id: google_wif_provider_id - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - presign: true - presign_ttl: 1 - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - title: title - traceback: traceback - type: type - use_blob_urls: true - api_storages_gcswif_create: - path: /api/storages/gcswif/ - method: POST - auth: true - docs: Create GCS import storage with WIF. - source: - openapi: openapi/openapi.yaml - display-name: Create GCS import storage with WIF - request: - body: root.GcswifImportStorageRequest - content-type: application/json - response: - docs: '' - type: root.GcswifImportStorage - examples: - - request: - project: 1 - response: - body: - bucket: bucket - created_at: '2024-01-15T09:30:00Z' - description: description - google_application_credentials: google_application_credentials - google_project_id: google_project_id - google_project_number: google_project_number - google_service_account_email: google_service_account_email - google_wif_pool_id: google_wif_pool_id - google_wif_provider_id: google_wif_provider_id - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - presign: true - presign_ttl: 1 - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - title: title - traceback: traceback - type: type - use_blob_urls: true - api_storages_gcswif_validate_create: - path: /api/storages/gcswif/validate - method: POST - auth: true - docs: >- - Validate a specific GCS import storage connection that was set up with - WIF authentication. - source: - openapi: openapi/openapi.yaml - display-name: Validate GCS WIF import storage - request: - body: root.GcswifImportStorageRequest - content-type: application/json - response: - docs: '' - type: root.GcswifImportStorage - examples: - - request: - project: 1 - response: - body: - bucket: bucket - created_at: '2024-01-15T09:30:00Z' - description: description - google_application_credentials: google_application_credentials - google_project_id: google_project_id - google_project_number: google_project_number - google_service_account_email: google_service_account_email - google_wif_pool_id: google_wif_pool_id - google_wif_provider_id: google_wif_provider_id - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - presign: true - presign_ttl: 1 - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - title: title - traceback: traceback - type: type - use_blob_urls: true - api_storages_gcswif_retrieve: - path: /api/storages/gcswif/{id} - method: GET - auth: true - docs: Get a specific GCS import storage connection that was set up with WIF. 
- source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Get GCS WIF import storage - response: - docs: '' - type: root.GcswifImportStorage - examples: - - path-parameters: - id: 1 - response: - body: - bucket: bucket - created_at: '2024-01-15T09:30:00Z' - description: description - google_application_credentials: google_application_credentials - google_project_id: google_project_id - google_project_number: google_project_number - google_service_account_email: google_service_account_email - google_wif_pool_id: google_wif_pool_id - google_wif_provider_id: google_wif_provider_id - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - presign: true - presign_ttl: 1 - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - title: title - traceback: traceback - type: type - use_blob_urls: true - api_storages_gcswif_destroy: - path: /api/storages/gcswif/{id} - method: DELETE - auth: true - docs: >- - Delete a specific GCS import storage connection that was set up with WIF - authentication. - source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Delete GCS WIF import storage - examples: - - path-parameters: - id: 1 - api_storages_gcswif_partial_update: - path: /api/storages/gcswif/{id} - method: PATCH - auth: true - docs: >- - Update a specific GCS import storage connection that was set up with WIF - authentication. - source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Update GCS WIF import storage - request: - name: PatchedGcswifImportStorageRequest - body: - properties: - bucket: - type: optional - docs: GCS bucket name - description: - type: optional - docs: Cloud storage description - google_application_credentials: - type: optional - docs: The content of GOOGLE_APPLICATION_CREDENTIALS json file - google_project_id: - type: optional - docs: Google project ID - google_project_number: - type: optional - docs: Google project number - google_service_account_email: - type: optional - docs: Google service account email - google_wif_pool_id: - type: optional - docs: Google WIF pool ID - google_wif_provider_id: - type: optional - docs: Google WIF provider ID - last_sync: - type: optional - docs: Last sync finished time - last_sync_count: - type: optional - docs: Count of tasks synced last time - validation: - min: 0 - max: 2147483647 - last_sync_job: - type: optional - docs: Last sync job ID - validation: - maxLength: 256 - meta: optional - prefix: - type: optional - docs: GCS bucket prefix - presign: - type: optional - default: true - presign_ttl: - type: optional - docs: Presigned URLs TTL (in minutes) - validation: - min: 0 - max: 32767 - project: - type: optional - docs: A unique integer value identifying this project. 
- regex_filter: - type: optional - docs: Cloud storage regex for filtering objects - status: optional - synchronizable: - type: optional - default: true - title: - type: optional - docs: Cloud storage title - validation: - maxLength: 256 - traceback: - type: optional - docs: Traceback report for the last failed sync - use_blob_urls: - type: optional - docs: Interpret objects as BLOBs and generate URLs - content-type: application/json - response: - docs: '' - type: root.GcswifImportStorage - examples: - - path-parameters: - id: 1 - request: {} - response: - body: - bucket: bucket - created_at: '2024-01-15T09:30:00Z' - description: description - google_application_credentials: google_application_credentials - google_project_id: google_project_id - google_project_number: google_project_number - google_service_account_email: google_service_account_email - google_wif_pool_id: google_wif_pool_id - google_wif_provider_id: google_wif_provider_id - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - presign: true - presign_ttl: 1 - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - title: title - traceback: traceback - type: type - use_blob_urls: true - api_storages_gcswif_sync_create: - path: /api/storages/gcswif/{id}/sync - method: POST - auth: true - docs: >- - Sync tasks from an GCS import storage connection that was set up with - WIF authentication. - source: - openapi: openapi/openapi.yaml - path-parameters: - id: integer - display-name: Sync GCS WIF import storage - request: - body: root.GcswifImportStorageRequest - content-type: application/json - response: - docs: '' - type: root.GcswifImportStorage - examples: - - path-parameters: - id: 1 - request: - project: 1 - response: - body: - bucket: bucket - created_at: '2024-01-15T09:30:00Z' - description: description - google_application_credentials: google_application_credentials - google_project_id: google_project_id - google_project_number: google_project_number - google_service_account_email: google_service_account_email - google_wif_pool_id: google_wif_pool_id - google_wif_provider_id: google_wif_provider_id - id: 1 - last_sync: '2024-01-15T09:30:00Z' - last_sync_count: 1 - last_sync_job: last_sync_job - meta: - key: value - prefix: prefix - presign: true - presign_ttl: 1 - project: 1 - regex_filter: regex_filter - status: initialized - synchronizable: true - title: title - traceback: traceback - type: type - use_blob_urls: true - source: - openapi: openapi/openapi.yaml diff --git a/.mock/openapi/openapi.yaml b/.mock/openapi/openapi.yaml index eb39afb25..037b22c0b 100644 --- a/.mock/openapi/openapi.yaml +++ b/.mock/openapi/openapi.yaml @@ -10489,9 +10489,13 @@ paths: - description: Which field to use when ordering the results. in: query name: ordering - required: false schema: type: string + - description: Project ID + in: query + name: project + schema: + type: integer responses: '200': content: @@ -10506,6 +10510,12 @@ paths: summary: Get Azure SPI import storage tags: - 'Storage: Azure SPI' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - import_storage + - azure_spi + x-fern-sdk-method-name: list post: description: Create Azure import storage with Service Principal authentication. 
operationId: api_storages_azure_spi_create @@ -10533,6 +10543,12 @@ paths: summary: Create Azure import storage with SPI tags: - 'Storage: Azure SPI' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - import_storage + - azure_spi + x-fern-sdk-method-name: create /api/storages/azure_spi/validate: post: description: Validate a specific Azure import storage connection that was set up with Service Principal authentication. @@ -10550,17 +10566,19 @@ paths: $ref: '#/components/schemas/AzureServicePrincipalImportStorageRequest' required: true responses: - '201': - content: - application/json: - schema: - $ref: '#/components/schemas/AzureServicePrincipalImportStorage' - description: '' + '200': + description: Validation successful security: - Token: [] summary: Validate Azure SPI import storage tags: - 'Storage: Azure SPI' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - import_storage + - azure_spi + x-fern-sdk-method-name: validate /api/storages/azure_spi/{id}: delete: description: Delete a specific Azure import storage connection that was set up with Service Principal authentication. @@ -10579,6 +10597,12 @@ paths: summary: Delete Azure SPI import storage tags: - 'Storage: Azure SPI' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - import_storage + - azure_spi + x-fern-sdk-method-name: delete get: description: Get a specific Azure import storage connection that was set up with Service Principal authentication. operationId: api_storages_azure_spi_retrieve @@ -10600,6 +10624,12 @@ paths: summary: Get Azure SPI import storage tags: - 'Storage: Azure SPI' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - import_storage + - azure_spi + x-fern-sdk-method-name: get patch: description: Update a specific Azure import storage connection that was set up with Service Principal authentication. operationId: api_storages_azure_spi_partial_update @@ -10632,6 +10662,12 @@ paths: summary: Update Azure SPI import storage tags: - 'Storage: Azure SPI' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - import_storage + - azure_spi + x-fern-sdk-method-name: update /api/storages/azure_spi/{id}/sync: post: description: Sync tasks from an Azure import storage connection that was set up with Service Principal authentication. @@ -10642,18 +10678,6 @@ paths: required: true schema: type: integer - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/AzureServicePrincipalImportStorageRequest' - application/x-www-form-urlencoded: - schema: - $ref: '#/components/schemas/AzureServicePrincipalImportStorageRequest' - multipart/form-data: - schema: - $ref: '#/components/schemas/AzureServicePrincipalImportStorageRequest' - required: true responses: '200': content: @@ -10666,6 +10690,12 @@ paths: summary: Sync Azure SPI import storage tags: - 'Storage: Azure SPI' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - import_storage + - azure_spi + x-fern-sdk-method-name: sync /api/storages/export: get: description: Retrieve a list of the export storages of all types with their IDs. @@ -10989,9 +11019,13 @@ paths: - description: Which field to use when ordering the results. 
in: query name: ordering - required: false schema: type: string + - description: Project ID + in: query + name: project + schema: + type: integer responses: '200': content: @@ -11006,6 +11040,12 @@ paths: summary: Get all Azure SPI export storage tags: - 'Storage: Azure SPI' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - export_storage + - azure_spi + x-fern-sdk-method-name: list post: description: Create an Azure export storage connection with Service Principal authentication to store annotations. operationId: api_storages_export_azure_spi_create @@ -11033,6 +11073,12 @@ paths: summary: Create Azure export storage with SPI authentication tags: - 'Storage: Azure SPI' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - export_storage + - azure_spi + x-fern-sdk-method-name: create /api/storages/export/azure_spi/validate: post: description: Validate a specific Azure export storage connection that was set up with Service Principal authentication. @@ -11050,17 +11096,19 @@ paths: $ref: '#/components/schemas/AzureServicePrincipalExportStorageRequest' required: true responses: - '201': - content: - application/json: - schema: - $ref: '#/components/schemas/AzureServicePrincipalExportStorage' - description: '' + '200': + description: Validation successful security: - Token: [] summary: Validate Azure SPI export storage tags: - 'Storage: Azure SPI' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - export_storage + - azure_spi + x-fern-sdk-method-name: validate /api/storages/export/azure_spi/{id}: delete: description: Delete a specific Azure export storage connection that was set up with Service Principal authentication. @@ -11079,6 +11127,12 @@ paths: summary: Delete Azure SPI export storage tags: - 'Storage: Azure SPI' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - export_storage + - azure_spi + x-fern-sdk-method-name: delete get: description: Get a specific Azure export storage connection that was set up with Service Principal authentication. operationId: api_storages_export_azure_spi_retrieve @@ -11100,6 +11154,12 @@ paths: summary: Get Azure SPI export storage tags: - 'Storage: Azure SPI' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - export_storage + - azure_spi + x-fern-sdk-method-name: get patch: description: Update a specific Azure export storage connection that was set up with Service Principal authentication. operationId: api_storages_export_azure_spi_partial_update @@ -11132,6 +11192,12 @@ paths: summary: Update Azure SPI export storage tags: - 'Storage: Azure SPI' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - export_storage + - azure_spi + x-fern-sdk-method-name: update /api/storages/export/azure_spi/{id}/sync: post: description: Sync tasks from an Azure SPI export storage. 
@@ -11142,18 +11208,6 @@ paths: required: true schema: type: integer - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/AzureServicePrincipalExportStorageRequest' - application/x-www-form-urlencoded: - schema: - $ref: '#/components/schemas/AzureServicePrincipalExportStorageRequest' - multipart/form-data: - schema: - $ref: '#/components/schemas/AzureServicePrincipalExportStorageRequest' - required: true responses: '200': content: @@ -11166,6 +11220,12 @@ paths: summary: Sync Azure SPI export storage tags: - 'Storage: Azure SPI' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - export_storage + - azure_spi + x-fern-sdk-method-name: sync /api/storages/export/gcs: get: description: Get a list of all GCS export storage connections. @@ -11457,9 +11517,13 @@ paths: - description: Which field to use when ordering the results. in: query name: ordering - required: false schema: type: string + - description: Project ID + in: query + name: project + schema: + type: integer responses: '200': content: @@ -11474,6 +11538,12 @@ paths: summary: Get all GCS WIF export storage tags: - 'Storage: GCS WIF' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - export_storage + - gcswif + x-fern-sdk-method-name: list post: description: Create an GCS export storage connection with WIF authentication to store annotations. operationId: api_storages_export_gcswif_create @@ -11501,6 +11571,12 @@ paths: summary: Create GCS export storage with WIF authentication tags: - 'Storage: GCS WIF' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - export_storage + - gcswif + x-fern-sdk-method-name: create /api/storages/export/gcswif/validate: post: description: Validate a specific GCS export storage connection that was set up with WIF authentication. @@ -11518,17 +11594,19 @@ paths: $ref: '#/components/schemas/GCSWIFExportStorageRequest' required: true responses: - '201': - content: - application/json: - schema: - $ref: '#/components/schemas/GCSWIFExportStorage' - description: '' + '200': + description: Validation successful security: - Token: [] summary: Validate GCS WIF export storage tags: - 'Storage: GCS WIF' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - export_storage + - gcswif + x-fern-sdk-method-name: validate /api/storages/export/gcswif/{id}: delete: description: Delete a specific GCS export storage connection that was set up with WIF authentication. @@ -11547,6 +11625,12 @@ paths: summary: Delete GCS WIF export storage tags: - 'Storage: GCS WIF' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - export_storage + - gcswif + x-fern-sdk-method-name: delete get: description: Get a specific GCS export storage connection that was set up with WIF authentication. operationId: api_storages_export_gcswif_retrieve @@ -11568,6 +11652,12 @@ paths: summary: Get GCS WIF export storage tags: - 'Storage: GCS WIF' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - export_storage + - gcswif + x-fern-sdk-method-name: get patch: description: Update a specific GCS export storage connection that was set up with WIF authentication. operationId: api_storages_export_gcswif_partial_update @@ -11600,6 +11690,12 @@ paths: summary: Update GCS WIF export storage tags: - 'Storage: GCS WIF' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - export_storage + - gcswif + x-fern-sdk-method-name: update /api/storages/export/gcswif/{id}/sync: post: description: Sync tasks from an GCS WIF export storage. 
@@ -11610,18 +11706,6 @@ paths: required: true schema: type: integer - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/GCSWIFExportStorageRequest' - application/x-www-form-urlencoded: - schema: - $ref: '#/components/schemas/GCSWIFExportStorageRequest' - multipart/form-data: - schema: - $ref: '#/components/schemas/GCSWIFExportStorageRequest' - required: true responses: '200': content: @@ -11634,6 +11718,12 @@ paths: summary: Sync GCS WIF export storage tags: - 'Storage: GCS WIF' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - export_storage + - gcswif + x-fern-sdk-method-name: sync /api/storages/export/localfiles: get: description: Get a list of all local file export storage connections. @@ -13078,9 +13168,13 @@ paths: - description: Which field to use when ordering the results. in: query name: ordering - required: false schema: type: string + - description: Project ID + in: query + name: project + schema: + type: integer responses: '200': content: @@ -13095,6 +13189,12 @@ paths: summary: Get GCS WIF import storage tags: - 'Storage: GCS WIF' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - import_storage + - gcswif + x-fern-sdk-method-name: list post: description: Create GCS import storage with WIF. operationId: api_storages_gcswif_create @@ -13122,6 +13222,12 @@ paths: summary: Create GCS import storage with WIF tags: - 'Storage: GCS WIF' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - import_storage + - gcswif + x-fern-sdk-method-name: create /api/storages/gcswif/validate: post: description: Validate a specific GCS import storage connection that was set up with WIF authentication. @@ -13139,17 +13245,19 @@ paths: $ref: '#/components/schemas/GCSWIFImportStorageRequest' required: true responses: - '201': - content: - application/json: - schema: - $ref: '#/components/schemas/GCSWIFImportStorage' - description: '' + '200': + description: Validation successful security: - Token: [] summary: Validate GCS WIF import storage tags: - 'Storage: GCS WIF' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - import_storage + - gcswif + x-fern-sdk-method-name: validate /api/storages/gcswif/{id}: delete: description: Delete a specific GCS import storage connection that was set up with WIF authentication. @@ -13168,6 +13276,12 @@ paths: summary: Delete GCS WIF import storage tags: - 'Storage: GCS WIF' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - import_storage + - gcswif + x-fern-sdk-method-name: delete get: description: Get a specific GCS import storage connection that was set up with WIF. operationId: api_storages_gcswif_retrieve @@ -13189,6 +13303,12 @@ paths: summary: Get GCS WIF import storage tags: - 'Storage: GCS WIF' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - import_storage + - gcswif + x-fern-sdk-method-name: get patch: description: Update a specific GCS import storage connection that was set up with WIF authentication. operationId: api_storages_gcswif_partial_update @@ -13221,6 +13341,12 @@ paths: summary: Update GCS WIF import storage tags: - 'Storage: GCS WIF' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - import_storage + - gcswif + x-fern-sdk-method-name: update /api/storages/gcswif/{id}/sync: post: description: Sync tasks from an GCS import storage connection that was set up with WIF authentication. 
@@ -13231,18 +13357,6 @@ paths: required: true schema: type: integer - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/GCSWIFImportStorageRequest' - application/x-www-form-urlencoded: - schema: - $ref: '#/components/schemas/GCSWIFImportStorageRequest' - multipart/form-data: - schema: - $ref: '#/components/schemas/GCSWIFImportStorageRequest' - required: true responses: '200': content: @@ -13255,6 +13369,12 @@ paths: summary: Sync GCS WIF import storage tags: - 'Storage: GCS WIF' + x-fern-audiences: + - public + x-fern-sdk-group-name: + - import_storage + - gcswif + x-fern-sdk-method-name: sync /api/storages/localfiles/: get: description: Get a list of all local file import storage connections. diff --git a/reference.md b/reference.md index b270df87d..5ff0dfff0 100644 --- a/reference.md +++ b/reference.md @@ -11915,8 +11915,8 @@ client.export_storage.azure.sync( -## ExportStorage Gcs -
client.export_storage.gcs.list(...) +## ExportStorage AzureSpi +
client.export_storage.azure_spi.list(...)
@@ -11928,7 +11928,7 @@ client.export_storage.azure.sync(
-Get a list of all GCS export storage connections. +Get a list of all Azure export storage connections that were set up with Service Principal authentication.
@@ -11948,7 +11948,7 @@ from label_studio_sdk import LabelStudio
 
 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.export_storage.gcs.list()
+client.export_storage.azure_spi.list()
 
 ```
@@ -11992,7 +11992,7 @@ client.export_storage.gcs.list()
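For illustration only (not part of the generated reference), the `project` query parameter added in this change can narrow the listing to a single project; the ID below is a placeholder:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# List only the Azure SPI export storages attached to project 1 (placeholder ID).
storages = client.export_storage.azure_spi.list(project=1)
```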
-
client.export_storage.gcs.create(...) +
client.export_storage.azure_spi.create(...)
@@ -12004,7 +12004,7 @@ client.export_storage.gcs.list()
-Create a new GCS export storage connection to store annotations. +Create an Azure export storage connection with Service Principal authentication to store annotations.
@@ -12024,7 +12024,9 @@ from label_studio_sdk import LabelStudio
 
 client = LabelStudio(
     api_key="YOUR_API_KEY",
 )
-client.export_storage.gcs.create()
+client.export_storage.azure_spi.create(
+    project=1,
+)
 
 ```
@@ -12040,7 +12042,7 @@ client.export_storage.gcs.create()
-**bucket:** `typing.Optional[str]` — GCS bucket name +**project:** `int` — A unique integer value identifying this project.
@@ -12048,7 +12050,7 @@ client.export_storage.gcs.create()
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. +**account_name:** `typing.Optional[str]` — Azure Blob account name
@@ -12056,7 +12058,7 @@ client.export_storage.gcs.create()
-**description:** `typing.Optional[str]` — Storage description +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled
@@ -12064,7 +12066,7 @@ client.export_storage.gcs.create()
-**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. +**client_id:** `typing.Optional[str]` — Azure Blob Service Principal Client ID
@@ -12072,7 +12074,7 @@ client.export_storage.gcs.create()
-**google_project_id:** `typing.Optional[str]` — Google project ID +**client_secret:** `typing.Optional[str]` — Azure Blob Service Principal Client Secret
@@ -12080,7 +12082,7 @@ client.export_storage.gcs.create()
-**prefix:** `typing.Optional[str]` — GCS bucket prefix +**container:** `typing.Optional[str]` — Azure blob container
@@ -12088,7 +12090,7 @@ client.export_storage.gcs.create()
-**project:** `typing.Optional[int]` — Project ID +**description:** `typing.Optional[str]` — Cloud storage description
@@ -12096,7 +12098,7 @@ client.export_storage.gcs.create()
-**title:** `typing.Optional[str]` — Storage title +**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time
@@ -12104,67 +12106,31 @@ client.export_storage.gcs.create()
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time
- -
- - - - -
- -
client.export_storage.gcs.validate(...) -
-
- -#### 📝 Description - -
-
-Validate a specific GCS export storage connection. -
-
+**last_sync_job:** `typing.Optional[str]` — Last sync job ID +
-#### 🔌 Usage -
-
-
- -```python -from label_studio_sdk import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.export_storage.gcs.validate() - -``` -
-
+**meta:** `typing.Optional[typing.Optional[typing.Any]]` +
-#### ⚙️ Parameters - -
-
-
-**bucket:** `typing.Optional[str]` — GCS bucket name +**prefix:** `typing.Optional[str]` — Azure blob prefix name
@@ -12172,7 +12138,7 @@ client.export_storage.gcs.validate()
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects
@@ -12180,7 +12146,7 @@ client.export_storage.gcs.validate()
-**description:** `typing.Optional[str]` — Storage description +**status:** `typing.Optional[StatusC5AEnum]`
@@ -12188,7 +12154,7 @@ client.export_storage.gcs.validate()
-**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. +**synchronizable:** `typing.Optional[bool]`
@@ -12196,7 +12162,7 @@ client.export_storage.gcs.validate()
-**google_project_id:** `typing.Optional[str]` — Google project ID +**tenant_id:** `typing.Optional[str]` — Azure Tenant ID
@@ -12204,7 +12170,7 @@ client.export_storage.gcs.validate()
-**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated +**title:** `typing.Optional[str]` — Cloud storage title
@@ -12212,7 +12178,7 @@ client.export_storage.gcs.validate()
-**prefix:** `typing.Optional[str]` — GCS bucket prefix +**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync
@@ -12220,7 +12186,7 @@ client.export_storage.gcs.validate()
-**project:** `typing.Optional[int]` — Project ID +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs
@@ -12228,7 +12194,7 @@ client.export_storage.gcs.validate()
-**title:** `typing.Optional[str]` — Storage title +**user_delegation_key:** `typing.Optional[str]` — User Delegation Key (Backend)
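For illustration, a sketch of a create call that passes the Service Principal fields documented above; every value is a placeholder, and which fields are required beyond `project` is not stated here:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# All values below are placeholders; supply your own Azure Blob and Service Principal details.
client.export_storage.azure_spi.create(
    project=1,
    title="Annotations export",
    account_name="mystorageaccount",
    container="annotations",
    prefix="label-studio/",
    tenant_id="YOUR_TENANT_ID",
    client_id="YOUR_CLIENT_ID",
    client_secret="YOUR_CLIENT_SECRET",
)
```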
@@ -12248,7 +12214,7 @@ client.export_storage.gcs.validate()
-
client.export_storage.gcs.get(...) +
client.export_storage.azure_spi.validate(...)
@@ -12260,7 +12226,7 @@ client.export_storage.gcs.validate()
-Get a specific GCS export storage connection. +Validate a specific Azure export storage connection that was set up with Service Principal authentication.
@@ -12280,8 +12246,8 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.gcs.get( - id=1, +client.export_storage.azure_spi.validate( + project=1, ) ``` @@ -12298,7 +12264,7 @@ client.export_storage.gcs.get(
-**id:** `int` +**project:** `int` — A unique integer value identifying this project.
@@ -12306,69 +12272,39 @@ client.export_storage.gcs.get(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**account_name:** `typing.Optional[str]` — Azure Blob account name
- -
- - - -
- -
client.export_storage.gcs.delete(...)
-#### 📝 Description - -
-
+**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled + +
+
-Delete a specific GCS export storage connection. -
-
+**client_id:** `typing.Optional[str]` — Azure Blob Service Principal Client ID +
-#### 🔌 Usage -
-
-
- -```python -from label_studio_sdk import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.export_storage.gcs.delete( - id=1, -) - -``` -
-
+**client_secret:** `typing.Optional[str]` — Azure Blob Service Principal Client Secret +
-#### ⚙️ Parameters - -
-
-
-**id:** `int` +**container:** `typing.Optional[str]` — Azure blob container
@@ -12376,69 +12312,47 @@ client.export_storage.gcs.delete(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**description:** `typing.Optional[str]` — Cloud storage description
-
-
+
+
+**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time +
-
-
client.export_storage.gcs.update(...)
-#### 📝 Description - -
-
+**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time + +
+
-Update a specific GCS export storage connection. -
-
+**last_sync_job:** `typing.Optional[str]` — Last sync job ID +
-#### 🔌 Usage - -
-
-
-```python -from label_studio_sdk import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.export_storage.gcs.update( - id=1, -) - -``` -
-
+**meta:** `typing.Optional[typing.Optional[typing.Any]]` +
-#### ⚙️ Parameters - -
-
-
-**id:** `int` +**prefix:** `typing.Optional[str]` — Azure blob prefix name
@@ -12446,7 +12360,7 @@ client.export_storage.gcs.update(
-**bucket:** `typing.Optional[str]` — GCS bucket name +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects
@@ -12454,7 +12368,7 @@ client.export_storage.gcs.update(
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. +**status:** `typing.Optional[StatusC5AEnum]`
@@ -12462,7 +12376,7 @@ client.export_storage.gcs.update(
-**description:** `typing.Optional[str]` — Storage description +**synchronizable:** `typing.Optional[bool]`
@@ -12470,7 +12384,7 @@ client.export_storage.gcs.update(
-**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. +**tenant_id:** `typing.Optional[str]` — Azure Tenant ID
@@ -12478,7 +12392,7 @@ client.export_storage.gcs.update(
-**google_project_id:** `typing.Optional[str]` — Google project ID +**title:** `typing.Optional[str]` — Cloud storage title
@@ -12486,7 +12400,7 @@ client.export_storage.gcs.update(
-**prefix:** `typing.Optional[str]` — GCS bucket prefix +**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync
@@ -12494,7 +12408,7 @@ client.export_storage.gcs.update(
-**project:** `typing.Optional[int]` — Project ID +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs
@@ -12502,7 +12416,7 @@ client.export_storage.gcs.update(
-**title:** `typing.Optional[str]` — Storage title +**user_delegation_key:** `typing.Optional[str]` — User Delegation Key (Backend)
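A hedged sketch of validating a connection with the same optional fields listed above; placeholders throughout:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Placeholders only; per the OpenAPI change above, validate returns 200 on success.
client.export_storage.azure_spi.validate(
    project=1,
    account_name="mystorageaccount",
    container="annotations",
    tenant_id="YOUR_TENANT_ID",
    client_id="YOUR_CLIENT_ID",
    client_secret="YOUR_CLIENT_SECRET",
)
```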
@@ -12522,7 +12436,7 @@ client.export_storage.gcs.update(
-
client.export_storage.gcs.sync(...) +
client.export_storage.azure_spi.get(...)
@@ -12534,7 +12448,7 @@ client.export_storage.gcs.update(
-Sync tasks from an GCS export storage connection. +Get a specific Azure export storage connection that was set up with Service Principal authentication.
@@ -12554,7 +12468,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.gcs.sync( +client.export_storage.azure_spi.get( id=1, ) @@ -12592,8 +12506,7 @@ client.export_storage.gcs.sync(
-## ExportStorage Local -
client.export_storage.local.list(...) +
client.export_storage.azure_spi.delete(...)
@@ -12605,7 +12518,7 @@ client.export_storage.gcs.sync(
-Get a list of all local file export storage connections. +Delete a specific Azure export storage connection that was set up with Service Principal authentication.
@@ -12625,7 +12538,9 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.local.list() +client.export_storage.azure_spi.delete( + id=1, +) ``` @@ -12641,15 +12556,7 @@ client.export_storage.local.list()
-**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. - -
-
- -
-
- -**project:** `typing.Optional[int]` — Project ID +**id:** `int`
@@ -12669,7 +12576,7 @@ client.export_storage.local.list()
-
client.export_storage.local.create(...) +
client.export_storage.azure_spi.update(...)
@@ -12681,7 +12588,7 @@ client.export_storage.local.list()
-Create a new local file export storage connection to store annotations. +Update a specific Azure export storage connection that was set up with Service Principal authentication.
@@ -12701,7 +12608,9 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.local.create() +client.export_storage.azure_spi.update( + id=1, +) ``` @@ -12717,7 +12626,7 @@ client.export_storage.local.create()
-**description:** `typing.Optional[str]` — Storage description +**id:** `int`
@@ -12725,7 +12634,7 @@ client.export_storage.local.create()
-**path:** `typing.Optional[str]` — Path to local directory +**account_name:** `typing.Optional[str]` — Azure Blob account name
@@ -12733,7 +12642,7 @@ client.export_storage.local.create()
-**project:** `typing.Optional[int]` — Project ID +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled
@@ -12741,7 +12650,7 @@ client.export_storage.local.create()
-**regex_filter:** `typing.Optional[str]` — Regex for filtering objects +**client_id:** `typing.Optional[str]` — Azure Blob Service Principal Client ID
@@ -12749,7 +12658,7 @@ client.export_storage.local.create()
-**title:** `typing.Optional[str]` — Storage title +**client_secret:** `typing.Optional[str]` — Azure Blob Service Principal Client Secret
@@ -12757,7 +12666,7 @@ client.export_storage.local.create()
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. +**container:** `typing.Optional[str]` — Azure blob container
@@ -12765,67 +12674,71 @@ client.export_storage.local.create()
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**description:** `typing.Optional[str]` — Cloud storage description
- -
+
+
+**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time +
-
-
client.export_storage.local.validate(...)
-#### 📝 Description - -
-
+**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time + +
+
-Validate a specific local file export storage connection. -
-
+**last_sync_job:** `typing.Optional[str]` — Last sync job ID +
-#### 🔌 Usage -
+**meta:** `typing.Optional[typing.Optional[typing.Any]]` + +
+
+
-```python -from label_studio_sdk import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.export_storage.local.validate() - -``` +**prefix:** `typing.Optional[str]` — Azure blob prefix name +
+ +
+
+ +**project:** `typing.Optional[int]` — A unique integer value identifying this project. +
-#### ⚙️ Parameters -
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects + +
+
+
-**description:** `typing.Optional[str]` — Storage description +**status:** `typing.Optional[StatusC5AEnum]`
@@ -12833,7 +12746,7 @@ client.export_storage.local.validate()
-**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated +**synchronizable:** `typing.Optional[bool]`
@@ -12841,7 +12754,7 @@ client.export_storage.local.validate()
-**path:** `typing.Optional[str]` — Path to local directory +**tenant_id:** `typing.Optional[str]` — Azure Tenant ID
@@ -12849,7 +12762,7 @@ client.export_storage.local.validate()
-**project:** `typing.Optional[int]` — Project ID +**title:** `typing.Optional[str]` — Cloud storage title
@@ -12857,7 +12770,7 @@ client.export_storage.local.validate()
-**regex_filter:** `typing.Optional[str]` — Regex for filtering objects +**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync
@@ -12865,7 +12778,7 @@ client.export_storage.local.validate()
-**title:** `typing.Optional[str]` — Storage title +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs
@@ -12873,7 +12786,7 @@ client.export_storage.local.validate()
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. +**user_delegation_key:** `typing.Optional[str]` — User Delegation Key (Backend)
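Illustrative only — a sketch of a partial update that rotates the client secret on an existing connection; the field names come from the parameter list above and the values are placeholders:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# PATCH-style update: only the fields passed are changed (placeholder values).
client.export_storage.azure_spi.update(
    id=1,
    client_secret="NEW_CLIENT_SECRET",
    can_delete_objects=False,
)
```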
@@ -12893,7 +12806,7 @@ client.export_storage.local.validate()
-
client.export_storage.local.get(...) +
client.export_storage.azure_spi.sync(...)
@@ -12905,7 +12818,7 @@ client.export_storage.local.validate()
-Get a specific local file export storage connection. +Sync tasks from an Azure SPI export storage.
@@ -12925,7 +12838,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.local.get( +client.export_storage.azure_spi.sync( id=1, ) @@ -12963,7 +12876,8 @@ client.export_storage.local.get(
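A small usage sketch, assuming a typical flow of creating a connection and then triggering a sync by ID (the sync call takes only `id`, per the request-body removal in the OpenAPI change above):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Hypothetical flow: create the storage, then push annotations to it by ID.
storage = client.export_storage.azure_spi.create(project=1)
client.export_storage.azure_spi.sync(id=storage.id)
```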
-
client.export_storage.local.delete(...) +## ExportStorage Gcs +
client.export_storage.gcs.list(...)
@@ -12975,7 +12889,7 @@ client.export_storage.local.get(
-Delete a specific local file export storage connection. +Get a list of all GCS export storage connections.
@@ -12995,9 +12909,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.local.delete( - id=1, -) +client.export_storage.gcs.list() ``` @@ -13013,7 +12925,15 @@ client.export_storage.local.delete(
-**id:** `int` +**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID
@@ -13033,7 +12953,7 @@ client.export_storage.local.delete(
-
client.export_storage.local.update(...) +
client.export_storage.gcs.create(...)
@@ -13045,7 +12965,7 @@ client.export_storage.local.delete(
-Update a specific local file export storage connection. +Create a new GCS export storage connection to store annotations.
@@ -13065,9 +12985,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.local.update( - id=1, -) +client.export_storage.gcs.create() ``` @@ -13083,7 +13001,15 @@ client.export_storage.local.update(
-**id:** `int` +**bucket:** `typing.Optional[str]` — GCS bucket name + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
@@ -13099,7 +13025,7 @@ client.export_storage.local.update(
-**path:** `typing.Optional[str]` — Path to local directory +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details.
@@ -13107,7 +13033,7 @@ client.export_storage.local.update(
-**project:** `typing.Optional[int]` — Project ID +**google_project_id:** `typing.Optional[str]` — Google project ID
@@ -13115,7 +13041,7 @@ client.export_storage.local.update(
-**regex_filter:** `typing.Optional[str]` — Regex for filtering objects +**prefix:** `typing.Optional[str]` — GCS bucket prefix
@@ -13123,7 +13049,7 @@ client.export_storage.local.update(
-**title:** `typing.Optional[str]` — Storage title +**project:** `typing.Optional[int]` — Project ID
@@ -13131,7 +13057,7 @@ client.export_storage.local.update(
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. +**title:** `typing.Optional[str]` — Storage title
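For comparison, an illustrative sketch of the plain GCS create call using the parameters listed above; values are placeholders, and the credentials string would normally hold the contents of a service-account JSON key file:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Placeholders only; google_application_credentials takes the JSON key file contents as a string.
client.export_storage.gcs.create(
    project=1,
    bucket="my-bucket",
    prefix="label-studio/",
    google_application_credentials='{"type": "service_account", "...": "..."}',
)
```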
@@ -13151,7 +13077,7 @@ client.export_storage.local.update(
-
client.export_storage.local.sync(...) +
client.export_storage.gcs.validate(...)
@@ -13163,7 +13089,7 @@ client.export_storage.local.update(
-Sync tasks from a local file export storage connection. +Validate a specific GCS export storage connection.
@@ -13183,9 +13109,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.local.sync( - id=1, -) +client.export_storage.gcs.validate() ``` @@ -13201,7 +13125,7 @@ client.export_storage.local.sync(
-**id:** `int` +**bucket:** `typing.Optional[str]` — GCS bucket name
@@ -13209,68 +13133,55 @@ client.export_storage.local.sync(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
- -
+
+
+**description:** `typing.Optional[str]` — Storage description +
-
-## ExportStorage Redis -
client.export_storage.redis.list(...)
-#### 📝 Description - -
-
+**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. + +
+
-Get a list of all Redis export storage connections. -
-
+**google_project_id:** `typing.Optional[str]` — Google project ID +
-#### 🔌 Usage - -
-
-
-```python -from label_studio_sdk import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.export_storage.redis.list() - -``` -
-
+**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated +
-#### ⚙️ Parameters -
+**prefix:** `typing.Optional[str]` — GCS bucket prefix + +
+
+
-**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. +**project:** `typing.Optional[int]` — Project ID
@@ -13278,7 +13189,7 @@ client.export_storage.redis.list()
-**project:** `typing.Optional[int]` — Project ID +**title:** `typing.Optional[str]` — Storage title
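An illustrative sketch of re-validating an existing GCS connection by passing its storage `id` along with the settings to check (placeholder values):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Placeholders; per the parameter docs above, if id is set the storage with that ID will be updated.
client.export_storage.gcs.validate(
    id=1,
    project=1,
    bucket="my-bucket",
)
```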
@@ -13298,7 +13209,7 @@ client.export_storage.redis.list()
-
client.export_storage.redis.create(...) +
client.export_storage.gcs.get(...)
@@ -13310,7 +13221,7 @@ client.export_storage.redis.list()
-Create a new Redis export storage connection to store annotations. +Get a specific GCS export storage connection.
@@ -13330,7 +13241,9 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.redis.create() +client.export_storage.gcs.get( + id=1, +) ``` @@ -13346,7 +13259,7 @@ client.export_storage.redis.create()
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. +**id:** `int`
@@ -13354,63 +13267,69 @@ client.export_storage.redis.create()
-**db:** `typing.Optional[int]` — Database ID of database to use +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+ +
-
-
-**description:** `typing.Optional[str]` — Storage description -
+
+
client.export_storage.gcs.delete(...)
-**host:** `typing.Optional[str]` — Server Host IP (optional) - -
-
+#### 📝 Description
-**password:** `typing.Optional[str]` — Server Password (optional) - -
-
-
-**path:** `typing.Optional[str]` — Storage prefix (optional) - +Delete a specific GCS export storage connection. +
+
+#### 🔌 Usage +
-**port:** `typing.Optional[str]` — Server Port (optional) - -
-
-
-**project:** `typing.Optional[int]` — Project ID - +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.gcs.delete( + id=1, +) + +```
+ + + +#### ⚙️ Parameters
-**title:** `typing.Optional[str]` — Storage title +
+
+ +**id:** `int`
@@ -13430,7 +13349,7 @@ client.export_storage.redis.create()
-
client.export_storage.redis.validate(...) +
client.export_storage.gcs.update(...)
@@ -13442,7 +13361,7 @@ client.export_storage.redis.create()
-Validate a specific Redis export storage connection. +Update a specific GCS export storage connection.
@@ -13462,7 +13381,9 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.redis.validate() +client.export_storage.gcs.update( + id=1, +) ``` @@ -13478,15 +13399,7 @@ client.export_storage.redis.validate()
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. - -
-
- -
-
- -**db:** `typing.Optional[int]` — Database ID of database to use +**id:** `int`
@@ -13494,7 +13407,7 @@ client.export_storage.redis.validate()
-**description:** `typing.Optional[str]` — Storage description +**bucket:** `typing.Optional[str]` — GCS bucket name
@@ -13502,7 +13415,7 @@ client.export_storage.redis.validate()
-**host:** `typing.Optional[str]` — Server Host IP (optional) +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled.
@@ -13510,7 +13423,7 @@ client.export_storage.redis.validate()
-**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated +**description:** `typing.Optional[str]` — Storage description
@@ -13518,7 +13431,7 @@ client.export_storage.redis.validate()
-**password:** `typing.Optional[str]` — Server Password (optional) +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details.
@@ -13526,7 +13439,7 @@ client.export_storage.redis.validate()
-**path:** `typing.Optional[str]` — Storage prefix (optional) +**google_project_id:** `typing.Optional[str]` — Google project ID
@@ -13534,7 +13447,7 @@ client.export_storage.redis.validate()
-**port:** `typing.Optional[str]` — Server Port (optional) +**prefix:** `typing.Optional[str]` — GCS bucket prefix
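Illustrative only — a sketch of updating the prefix and description of an existing GCS export storage (placeholder values):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Placeholders; only the supplied fields are modified.
client.export_storage.gcs.update(
    id=1,
    prefix="exports/2024/",
    description="GCS annotation exports",
)
```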
@@ -13570,7 +13483,7 @@ client.export_storage.redis.validate()
-
client.export_storage.redis.get(...) +
client.export_storage.gcs.sync(...)
@@ -13582,7 +13495,7 @@ client.export_storage.redis.validate()
-Get a specific Redis export storage connection. +Sync tasks from a GCS export storage connection.
@@ -13602,7 +13515,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.redis.get( +client.export_storage.gcs.sync( id=1, ) @@ -13640,7 +13553,8 @@ client.export_storage.redis.get(
-
client.export_storage.redis.delete(...) +## ExportStorage Gcswif +
client.export_storage.gcswif.list(...)
@@ -13652,7 +13566,7 @@ client.export_storage.redis.get(
-Delete a specific Redis export storage connection. +Get a list of all GCS export storage connections that were set up with WIF authentication.
@@ -13672,9 +13586,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.redis.delete( - id=1, -) +client.export_storage.gcswif.list() ``` @@ -13690,7 +13602,15 @@ client.export_storage.redis.delete(
-**id:** `int` +**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID
@@ -13710,7 +13630,7 @@ client.export_storage.redis.delete(
-
client.export_storage.redis.update(...) +
client.export_storage.gcswif.create(...)
@@ -13722,7 +13642,7 @@ client.export_storage.redis.delete(
-Update a specific Redis export storage connection. +Create a GCS export storage connection with WIF authentication to store annotations.
@@ -13742,8 +13662,8 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.redis.update( - id=1, +client.export_storage.gcswif.create( + project=1, ) ``` @@ -13760,7 +13680,7 @@ client.export_storage.redis.update(
-**id:** `int` +**project:** `int` — A unique integer value identifying this project.
@@ -13768,7 +13688,7 @@ client.export_storage.redis.update(
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. +**bucket:** `typing.Optional[str]` — GCS bucket name
@@ -13776,7 +13696,7 @@ client.export_storage.redis.update(
-**db:** `typing.Optional[int]` — Database ID of database to use +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled
@@ -13784,7 +13704,7 @@ client.export_storage.redis.update(
-**description:** `typing.Optional[str]` — Storage description +**description:** `typing.Optional[str]` — Cloud storage description
@@ -13792,7 +13712,7 @@ client.export_storage.redis.update(
-**host:** `typing.Optional[str]` — Server Host IP (optional) +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file
@@ -13800,7 +13720,7 @@ client.export_storage.redis.update(
-**password:** `typing.Optional[str]` — Server Password (optional) +**google_project_id:** `typing.Optional[str]` — Google project ID
@@ -13808,7 +13728,7 @@ client.export_storage.redis.update(
-**path:** `typing.Optional[str]` — Storage prefix (optional) +**google_project_number:** `typing.Optional[str]` — Google project number
@@ -13816,7 +13736,7 @@ client.export_storage.redis.update(
-**port:** `typing.Optional[str]` — Server Port (optional) +**google_service_account_email:** `typing.Optional[str]` — Google service account email
@@ -13824,7 +13744,7 @@ client.export_storage.redis.update(
-**project:** `typing.Optional[int]` — Project ID +**google_wif_pool_id:** `typing.Optional[str]` — Google WIF pool ID
@@ -13832,7 +13752,7 @@ client.export_storage.redis.update(
-**title:** `typing.Optional[str]` — Storage title +**google_wif_provider_id:** `typing.Optional[str]` — Google WIF provider ID
@@ -13840,69 +13760,87 @@ client.export_storage.redis.update(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time
- -
+
+
+**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time +
-
-
client.export_storage.redis.sync(...)
-#### 📝 Description +**last_sync_job:** `typing.Optional[str]` — Last sync job ID + +
+
+**meta:** `typing.Optional[typing.Optional[typing.Any]]` + +
+
+
-Sync tasks from a Redis export storage connection. -
-
+**prefix:** `typing.Optional[str]` — GCS bucket prefix + -#### 🔌 Usage -
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects + +
+
+
-```python -from label_studio_sdk import LabelStudio +**status:** `typing.Optional[StatusC5AEnum]` + +
+
-client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.export_storage.redis.sync( - id=1, -) +
+
-``` +**synchronizable:** `typing.Optional[bool]` +
+ +
+
+ +**title:** `typing.Optional[str]` — Cloud storage title +
-#### ⚙️ Parameters -
+**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync + +
+
+
-**id:** `int` +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs
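For illustration, a sketch of creating a GCS WIF export storage with the Workload Identity Federation fields documented above; all identifiers are placeholders:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)
# Placeholders only; supply your own Google Cloud project and WIF identifiers.
client.export_storage.gcswif.create(
    project=1,
    bucket="my-bucket",
    google_project_id="my-gcp-project",
    google_project_number="123456789012",
    google_wif_pool_id="my-wif-pool",
    google_wif_provider_id="my-wif-provider",
    google_service_account_email="labeling@my-gcp-project.iam.gserviceaccount.com",
)
```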
@@ -13922,8 +13860,7 @@ client.export_storage.redis.sync(
-## ExportStorage S3 -
client.export_storage.s3.list(...) +
client.export_storage.gcswif.validate(...)
@@ -13935,7 +13872,7 @@ client.export_storage.redis.sync(
-Get a list of all S3 export storage connections. +Validate a specific GCS export storage connection that was set up with WIF authentication.
@@ -13955,7 +13892,9 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.s3.list() +client.export_storage.gcswif.validate( + project=1, +) ``` @@ -13971,7 +13910,7 @@ client.export_storage.s3.list()
-**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. +**project:** `int` — A unique integer value identifying this project.
@@ -13979,7 +13918,7 @@ client.export_storage.s3.list()
-**project:** `typing.Optional[int]` — Project ID +**bucket:** `typing.Optional[str]` — GCS bucket name
@@ -13987,67 +13926,63 @@ client.export_storage.s3.list()
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled
- -
+
+
+**description:** `typing.Optional[str]` — Cloud storage description +
-
-
client.export_storage.s3.create(...)
-#### 📝 Description - -
-
+**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file + +
+
-Create a new S3 export storage connection to store annotations. -
-
+**google_project_id:** `typing.Optional[str]` — Google project ID +
-#### 🔌 Usage -
+**google_project_number:** `typing.Optional[str]` — Google project number + +
+
+
-```python -from label_studio_sdk import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.export_storage.s3.create() - -``` -
-
+**google_service_account_email:** `typing.Optional[str]` — Google service account email + -#### ⚙️ Parameters -
+**google_wif_pool_id:** `typing.Optional[str]` — Google WIF pool ID + +
+
+
-**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID +**google_wif_provider_id:** `typing.Optional[str]` — Google WIF provider ID
@@ -14055,7 +13990,7 @@ client.export_storage.s3.create()
-**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY +**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time
@@ -14063,7 +13998,7 @@ client.export_storage.s3.create()
-**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN +**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time
@@ -14071,7 +14006,7 @@ client.export_storage.s3.create()
-**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID +**last_sync_job:** `typing.Optional[str]` — Last sync job ID
@@ -14079,7 +14014,7 @@ client.export_storage.s3.create()
-**bucket:** `typing.Optional[str]` — S3 bucket name +**meta:** `typing.Optional[typing.Optional[typing.Any]]`
@@ -14087,7 +14022,7 @@ client.export_storage.s3.create()
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. +**prefix:** `typing.Optional[str]` — GCS bucket prefix
@@ -14095,7 +14030,7 @@ client.export_storage.s3.create()
-**description:** `typing.Optional[str]` — Storage description +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects
@@ -14103,7 +14038,7 @@ client.export_storage.s3.create()
-**prefix:** `typing.Optional[str]` — S3 bucket prefix +**status:** `typing.Optional[StatusC5AEnum]`
@@ -14111,7 +14046,7 @@ client.export_storage.s3.create()
-**project:** `typing.Optional[int]` — Project ID +**synchronizable:** `typing.Optional[bool]`
@@ -14119,7 +14054,7 @@ client.export_storage.s3.create()
-**region_name:** `typing.Optional[str]` — AWS Region +**title:** `typing.Optional[str]` — Cloud storage title
@@ -14127,7 +14062,7 @@ client.export_storage.s3.create()
-**s3endpoint:** `typing.Optional[str]` — S3 Endpoint +**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync
@@ -14135,7 +14070,7 @@ client.export_storage.s3.create()
-**title:** `typing.Optional[str]` — Storage title +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs
@@ -14155,7 +14090,7 @@ client.export_storage.s3.create()
-
client.export_storage.s3.validate(...) +
client.export_storage.gcswif.get(...)
@@ -14167,7 +14102,7 @@ client.export_storage.s3.create()
-Validate a specific S3 export storage connection. +Get a specific GCS export storage connection that was set up with WIF authentication.
@@ -14187,7 +14122,9 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.s3.validate() +client.export_storage.gcswif.get( + id=1, +) ``` @@ -14203,7 +14140,7 @@ client.export_storage.s3.validate()
-**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID +**id:** `int`
@@ -14211,71 +14148,69 @@ client.export_storage.s3.validate()
-**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+ +
-
-
-**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN -
+
+
client.export_storage.gcswif.delete(...)
-**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID - -
-
+#### 📝 Description
-**bucket:** `typing.Optional[str]` — S3 bucket name - -
-
-
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. - +Delete a specific GCS export storage connection that was set up with WIF authentication. +
+
+#### 🔌 Usage +
-**description:** `typing.Optional[str]` — Storage description - -
-
-
-**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated - +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.gcswif.delete( + id=1, +) + +``` +
+
+#### ⚙️ Parameters +
-**prefix:** `typing.Optional[str]` — S3 bucket prefix - -
-
-
-**project:** `typing.Optional[int]` — Project ID +**id:** `int`
@@ -14283,43 +14218,19 @@ client.export_storage.s3.validate()
-**region_name:** `typing.Optional[str]` — AWS Region +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+ + -
-
-**s3endpoint:** `typing.Optional[str]` — S3 Endpoint -
+
-
-
- -**title:** `typing.Optional[str]` — Storage title - -
-
- -
-
- -**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - -
-
- - - - - - -
- -
client.export_storage.s3.get(...) +
client.export_storage.gcswif.update(...)
@@ -14331,7 +14242,7 @@ client.export_storage.s3.validate()
-Get a specific S3 export storage connection. +Update a specific GCS export storage connection that was set up with WIF authentication.
@@ -14351,7 +14262,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.s3.get( +client.export_storage.gcswif.update( id=1, ) @@ -14377,69 +14288,31 @@ client.export_storage.s3.get(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**bucket:** `typing.Optional[str]` — GCS bucket name
- -
- - - - -
- -
client.export_storage.s3.delete(...) -
-
- -#### 📝 Description - -
-
-Delete a specific S3 export storage connection. -
-
+**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled +
-#### 🔌 Usage - -
-
-
-```python -from label_studio_sdk import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.export_storage.s3.delete( - id=1, -) - -``` -
-
+**description:** `typing.Optional[str]` — Cloud storage description +
-#### ⚙️ Parameters - -
-
-
-**id:** `int` +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file
@@ -14447,69 +14320,39 @@ client.export_storage.s3.delete(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**google_project_id:** `typing.Optional[str]` — Google project ID
-
-
- - -
-
-
-
client.export_storage.s3.update(...)
-#### 📝 Description - -
-
+**google_project_number:** `typing.Optional[str]` — Google project number + +
+
-Update a specific S3 export storage connection. -
-
+**google_service_account_email:** `typing.Optional[str]` — Google service account email +
-#### 🔌 Usage - -
-
-
-```python -from label_studio_sdk import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.export_storage.s3.update( - id=1, -) - -``` -
-
+**google_wif_pool_id:** `typing.Optional[str]` — Google WIF pool ID +
-#### ⚙️ Parameters -
-
-
- -**id:** `int` +**google_wif_provider_id:** `typing.Optional[str]` — Google WIF provider ID
@@ -14517,7 +14360,7 @@ client.export_storage.s3.update(
-**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID +**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time
@@ -14525,7 +14368,7 @@ client.export_storage.s3.update(
-**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY +**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time
@@ -14533,7 +14376,7 @@ client.export_storage.s3.update(
-**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN +**last_sync_job:** `typing.Optional[str]` — Last sync job ID
@@ -14541,7 +14384,7 @@ client.export_storage.s3.update(
-**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID +**meta:** `typing.Optional[typing.Optional[typing.Any]]`
@@ -14549,7 +14392,7 @@ client.export_storage.s3.update(
-**bucket:** `typing.Optional[str]` — S3 bucket name +**prefix:** `typing.Optional[str]` — GCS bucket prefix
@@ -14557,7 +14400,7 @@ client.export_storage.s3.update(
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. +**project:** `typing.Optional[int]` — A unique integer value identifying this project.
@@ -14565,7 +14408,7 @@ client.export_storage.s3.update(
-**description:** `typing.Optional[str]` — Storage description +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects
@@ -14573,7 +14416,7 @@ client.export_storage.s3.update(
-**prefix:** `typing.Optional[str]` — S3 bucket prefix +**status:** `typing.Optional[StatusC5AEnum]`
@@ -14581,7 +14424,7 @@ client.export_storage.s3.update(
-**project:** `typing.Optional[int]` — Project ID +**synchronizable:** `typing.Optional[bool]`
@@ -14589,7 +14432,7 @@ client.export_storage.s3.update(
-**region_name:** `typing.Optional[str]` — AWS Region +**title:** `typing.Optional[str]` — Cloud storage title
@@ -14597,7 +14440,7 @@ client.export_storage.s3.update(
-**s3endpoint:** `typing.Optional[str]` — S3 Endpoint +**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync
@@ -14605,7 +14448,7 @@ client.export_storage.s3.update(
-**title:** `typing.Optional[str]` — Storage title +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs
@@ -14625,7 +14468,7 @@ client.export_storage.s3.update(
-
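For clarity, here is a fuller, illustrative update call that combines several of the optional fields documented above; the bucket name, prefix, and WIF identifiers are placeholders rather than values from a real project.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Example values only - substitute your own GCS bucket and Workload Identity Federation settings.
client.export_storage.gcswif.update(
    id=1,
    bucket="my-annotations-bucket",
    prefix="label-studio/exports",
    google_project_id="my-gcp-project",
    google_wif_pool_id="my-wif-pool",
    google_wif_provider_id="my-wif-provider",
    google_service_account_email="labeling@my-gcp-project.iam.gserviceaccount.com",
    title="GCS WIF export",
)
```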
client.export_storage.s3.sync(...) +
client.export_storage.gcswif.sync(...)
@@ -14637,7 +14480,7 @@ client.export_storage.s3.update(
-Sync tasks from an S3 export storage connection. +Sync tasks from a GCS WIF export storage connection.
@@ -14657,7 +14500,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.s3.sync( +client.export_storage.gcswif.sync( id=1, ) @@ -14695,8 +14538,8 @@ client.export_storage.s3.sync(
-## ExportStorage S3S -
client.export_storage.s3s.list(...) +## ExportStorage Local +
client.export_storage.local.list(...)
@@ -14708,7 +14551,7 @@ client.export_storage.s3.sync(
-Get a list of all S3 export storage connections that were set up with IAM role access. +Get a list of all local file export storage connections.
@@ -14728,7 +14571,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.s3s.list() +client.export_storage.local.list() ``` @@ -14772,7 +14615,7 @@ client.export_storage.s3s.list()
-
client.export_storage.s3s.create(...) +
client.export_storage.local.create(...)
@@ -14784,7 +14627,7 @@ client.export_storage.s3s.list()
-Create an S3 export storage connection with IAM role access to store annotations. +Create a new local file export storage connection to store annotations.
@@ -14804,10 +14647,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.s3s.create( - project=1, - role_arn="role_arn", -) +client.export_storage.local.create() ``` @@ -14823,7 +14663,7 @@ client.export_storage.s3s.create(
-**project:** `int` — A unique integer value identifying this project. +**description:** `typing.Optional[str]` — Storage description
@@ -14831,7 +14671,7 @@ client.export_storage.s3s.create(
-**role_arn:** `str` — AWS RoleArn +**path:** `typing.Optional[str]` — Path to local directory
@@ -14839,7 +14679,7 @@ client.export_storage.s3s.create(
-**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID +**project:** `typing.Optional[int]` — Project ID
@@ -14847,7 +14687,7 @@ client.export_storage.s3s.create(
-**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY +**regex_filter:** `typing.Optional[str]` — Regex for filtering objects
@@ -14855,7 +14695,7 @@ client.export_storage.s3s.create(
-**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN +**title:** `typing.Optional[str]` — Storage title
@@ -14863,7 +14703,7 @@ client.export_storage.s3s.create(
-**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
@@ -14871,95 +14711,67 @@ client.export_storage.s3s.create(
-**bucket:** `typing.Optional[str]` — S3 bucket name +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
- -
-
- -**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled -
-
-
-**description:** `typing.Optional[str]` — Cloud storage description -
+
+
client.export_storage.local.validate(...)
-**external_id:** `typing.Optional[str]` — AWS ExternalId - -
-
+#### 📝 Description
-**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time - -
-
-
-**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time - +Validate a specific local file export storage connection.
- -
-
- -**last_sync_job:** `typing.Optional[str]` — Last sync job ID -
+#### 🔌 Usage +
-**legacy_auth:** `typing.Optional[bool]` - -
-
-
-**meta:** `typing.Optional[typing.Optional[typing.Any]]` - -
-
+```python +from label_studio_sdk import LabelStudio -
-
+client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.local.validate() -**prefix:** `typing.Optional[str]` — S3 bucket prefix - +``` +
+
+#### ⚙️ Parameters +
-**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects - -
-
-
-**region_name:** `typing.Optional[str]` — AWS Region +**description:** `typing.Optional[str]` — Storage description
@@ -14967,7 +14779,7 @@ client.export_storage.s3s.create(
-**s3endpoint:** `typing.Optional[str]` — S3 Endpoint +**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
@@ -14975,7 +14787,7 @@ client.export_storage.s3s.create(
-**status:** `typing.Optional[StatusC5AEnum]` +**path:** `typing.Optional[str]` — Path to local directory
@@ -14983,7 +14795,7 @@ client.export_storage.s3s.create(
-**synchronizable:** `typing.Optional[bool]` +**project:** `typing.Optional[int]` — Project ID
@@ -14991,7 +14803,7 @@ client.export_storage.s3s.create(
-**title:** `typing.Optional[str]` — Cloud storage title +**regex_filter:** `typing.Optional[str]` — Regex for filtering objects
@@ -14999,7 +14811,7 @@ client.export_storage.s3s.create(
-**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync +**title:** `typing.Optional[str]` — Storage title
@@ -15007,7 +14819,7 @@ client.export_storage.s3s.create(
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
@@ -15027,7 +14839,4294 @@ client.export_storage.s3s.create(
-
client.export_storage.s3s.validate(...) +
client.export_storage.local.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific local file export storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.local.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.local.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific local file export storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.local.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.local.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific local file export storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.local.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Path to local directory + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Regex for filtering objects + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your directory contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
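As a fuller sketch of the update call above, the following combines the optional fields documented for local file export storage; the path, title, and filter are illustrative placeholders.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Illustrative values - point `path` at a directory accessible to your Label Studio instance.
client.export_storage.local.update(
    id=1,
    path="/data/label-studio/export",
    title="Local export",
    regex_filter=".*\\.json$",
    use_blob_urls=False,
)
```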
client.export_storage.local.sync(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Sync tasks from a local file export storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.local.sync( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ExportStorage Redis +
client.export_storage.redis.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a list of all Redis export storage connections. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.redis.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.redis.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new Redis export storage connection to store annotations. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.redis.create() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. + +
+
+ +
+
+ +**db:** `typing.Optional[int]` — Database ID of database to use + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**host:** `typing.Optional[str]` — Server Host IP (optional) + +
+
+ +
+
+ +**password:** `typing.Optional[str]` — Server Password (optional) + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Storage prefix (optional) + +
+
+ +
+
+ +**port:** `typing.Optional[str]` — Server Port (optional) + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
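The bare create() call above works because every field is optional; the sketch below shows how the connection parameters documented in this section would typically be combined. The host, port, and password are placeholders.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Placeholder connection details - replace with your own Redis server settings.
client.export_storage.redis.create(
    project=1,
    title="Redis export",
    host="10.0.0.5",
    port="6379",
    password="REDIS_PASSWORD",
    db=1,
    path="label-studio/annotations",
)
```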
client.export_storage.redis.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific Redis export storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.redis.validate() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. + +
+
+ +
+
+ +**db:** `typing.Optional[int]` — Database ID of database to use + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**host:** `typing.Optional[str]` — Server Host IP (optional) + +
+
+ +
+
+ +**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated + +
+
+ +
+
+ +**password:** `typing.Optional[str]` — Server Password (optional) + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Storage prefix (optional) + +
+
+ +
+
+ +**port:** `typing.Optional[str]` — Server Port (optional) + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.redis.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific Redis export storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.redis.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.redis.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific Redis export storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.redis.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.redis.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific Redis export storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.redis.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. + +
+
+ +
+
+ +**db:** `typing.Optional[int]` — Database ID of database to use + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**host:** `typing.Optional[str]` — Server Host IP (optional) + +
+
+ +
+
+ +**password:** `typing.Optional[str]` — Server Password (optional) + +
+
+ +
+
+ +**path:** `typing.Optional[str]` — Storage prefix (optional) + +
+
+ +
+
+ +**port:** `typing.Optional[str]` — Server Port (optional) + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.redis.sync(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Sync tasks from a Redis export storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.redis.sync( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ExportStorage S3 +
client.export_storage.s3.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a list of all S3 export storage connections. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.s3.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new S3 export storage connection to store annotations. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3.create() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID + +
+
+ +
+
+ +**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY + +
+
+ +
+
+ +**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN + +
+
+ +
+
+ +**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
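Since all fields on create() are optional, the minimal example above passes none of them; the following sketch shows a more typical call using the parameters documented in this section. The bucket, region, and credentials are placeholders.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Placeholder bucket and credentials - use your own AWS settings.
client.export_storage.s3.create(
    project=1,
    title="S3 export",
    bucket="my-annotations-bucket",
    prefix="label-studio/exports",
    region_name="us-east-1",
    aws_access_key_id="YOUR_ACCESS_KEY_ID",
    aws_secret_access_key="YOUR_SECRET_ACCESS_KEY",
    can_delete_objects=False,
)
```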
client.export_storage.s3.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific S3 export storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3.validate() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID + +
+
+ +
+
+ +**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY + +
+
+ +
+
+ +**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN + +
+
+ +
+
+ +**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
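As with create(), every validation field is optional. The sketch below, with a placeholder bucket and credentials, checks settings for an existing connection by passing its id, which per the id description above targets the storage that would be updated.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Validate new settings against the existing export storage with id=1 before updating it.
client.export_storage.s3.validate(
    id=1,
    project=1,
    bucket="my-annotations-bucket",
    aws_access_key_id="YOUR_ACCESS_KEY_ID",
    aws_secret_access_key="YOUR_SECRET_ACCESS_KEY",
)
```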
client.export_storage.s3.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific S3 export storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.s3.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific S3 export storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.s3.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific S3 export storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID + +
+
+ +
+
+ +**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY + +
+
+ +
+
+ +**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN + +
+
+ +
+
+ +**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled. + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.s3.sync(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Sync tasks from an S3 export storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3.sync( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ExportStorage S3S +
client.export_storage.s3s.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a list of all S3 export storage connections that were set up with IAM role access. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3s.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.s3s.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create an S3 export storage connection with IAM role access to store annotations. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3s.create( + project=1, + role_arn="role_arn", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**role_arn:** `str` — AWS RoleArn + +
+
+ +
+
+ +**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID + +
+
+ +
+
+ +**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY + +
+
+ +
+
+ +**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN + +
+
+ +
+
+ +**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Cloud storage description + +
+
+ +
+
+ +**external_id:** `typing.Optional[str]` — AWS ExternalId + +
+
+ +
+
+ +**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time + +
+
+ +
+
+ +**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time + +
+
+ +
+
+ +**last_sync_job:** `typing.Optional[str]` — Last sync job ID + +
+
+ +
+
+ +**legacy_auth:** `typing.Optional[bool]` + +
+
+ +
+
+ +**meta:** `typing.Optional[typing.Optional[typing.Any]]` + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**status:** `typing.Optional[StatusC5AEnum]` + +
+
+ +
+
+ +**synchronizable:** `typing.Optional[bool]` + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Cloud storage title + +
+
+ +
+
+ +**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
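Only project and role_arn are required above; a fuller illustrative call would also set the bucket and the optional external_id used when assuming the role. All values below are placeholders.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Placeholder role ARN, external ID, and bucket - substitute the IAM role you created for Label Studio.
client.export_storage.s3s.create(
    project=1,
    role_arn="arn:aws:iam::123456789012:role/LabelStudioExport",
    external_id="my-external-id",
    bucket="my-annotations-bucket",
    prefix="label-studio/exports",
    region_name="us-east-1",
    title="S3 IAM-role export",
)
```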
client.export_storage.s3s.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific S3 export storage connection that was set up with IAM role access. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3s.validate( + project=1, + role_arn="role_arn", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**role_arn:** `str` — AWS RoleArn + +
+
+ +
+
+ +**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID + +
+
+ +
+
+ +**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY + +
+
+ +
+
+ +**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN + +
+
+ +
+
+ +**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Cloud storage description + +
+
+ +
+
+ +**external_id:** `typing.Optional[str]` — AWS ExternalId + +
+
+ +
+
+ +**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time + +
+
+ +
+
+ +**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time + +
+
+ +
+
+ +**last_sync_job:** `typing.Optional[str]` — Last sync job ID + +
+
+ +
+
+ +**legacy_auth:** `typing.Optional[bool]` + +
+
+ +
+
+ +**meta:** `typing.Optional[typing.Optional[typing.Any]]` + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**status:** `typing.Optional[StatusC5AEnum]` + +
+
+ +
+
+ +**synchronizable:** `typing.Optional[bool]` + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Cloud storage title + +
+
+ +
+
+ +**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.s3s.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific S3 export storage connection that was set up with IAM role access. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3s.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.s3s.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific S3 export storage connection that was set up with IAM role access. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3s.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.s3s.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific S3 export storage connection that was set up with IAM role access. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3s.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID + +
+
+ +
+
+ +**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY + +
+
+ +
+
+ +**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN + +
+
+ +
+
+ +**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID + +
+
+ +
+
+ +**bucket:** `typing.Optional[str]` — S3 bucket name + +
+
+ +
+
+ +**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Cloud storage description + +
+
+ +
+
+ +**external_id:** `typing.Optional[str]` — AWS ExternalId + +
+
+ +
+
+ +**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time + +
+
+ +
+
+ +**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time + +
+
+ +
+
+ +**last_sync_job:** `typing.Optional[str]` — Last sync job ID + +
+
+ +
+
+ +**legacy_auth:** `typing.Optional[bool]` + +
+
+ +
+
+ +**meta:** `typing.Optional[typing.Optional[typing.Any]]` + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — S3 bucket prefix + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — A unique integer value identifying this project. + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects + +
+
+ +
+
+ +**region_name:** `typing.Optional[str]` — AWS Region + +
+
+ +
+
+ +**role_arn:** `typing.Optional[str]` — AWS RoleArn + +
+
+ +
+
+ +**s3endpoint:** `typing.Optional[str]` — S3 Endpoint + +
+
+ +
+
+ +**status:** `typing.Optional[StatusC5AEnum]` + +
+
+ +
+
+ +**synchronizable:** `typing.Optional[bool]` + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Cloud storage title + +
+
+ +
+
+ +**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.export_storage.s3s.sync(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Sync tasks from an S3 export storage connection that was set up with IAM role access. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.export_storage.s3s.sync( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ImportStorage Azure +
client.import_storage.azure.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a list of all Azure import storage connections. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.azure.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a new Azure import storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure.create() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**account_key:** `typing.Optional[str]` — Azure Blob account key + +
+
+ +
+
+ +**account_name:** `typing.Optional[str]` — Azure Blob account name + +
+
+ +
+
+ +**container:** `typing.Optional[str]` — Azure blob container + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — Azure blob prefix name + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` — Presign URLs for direct download + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
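Because every field is optional, the minimal call above passes no connection details; the sketch below combines the documented parameters, including the regex_filter that must be set for any objects to be imported. The account, key, and container names are placeholders.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Placeholder account, key, and container - replace with your Azure Blob settings.
client.import_storage.azure.create(
    project=1,
    title="Azure imports",
    account_name="mystorageaccount",
    account_key="YOUR_ACCOUNT_KEY",
    container="tasks",
    prefix="raw/",
    regex_filter=".*\\.json$",
    use_blob_urls=True,
    presign=True,
    presign_ttl=15,
)
```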
client.import_storage.azure.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific Azure import storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure.validate() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**account_key:** `typing.Optional[str]` — Azure Blob account key + +
+
+ +
+
+ +**account_name:** `typing.Optional[str]` — Azure Blob account name + +
+
+ +
+
+ +**container:** `typing.Optional[str]` — Azure blob container + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — Azure blob prefix name + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` — Presign URLs for direct download + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.azure.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific Azure import storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.azure.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific Azure import storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.azure.update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update a specific Azure import storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure.update( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**account_key:** `typing.Optional[str]` — Azure Blob account key + +
+
+ +
+
+ +**account_name:** `typing.Optional[str]` — Azure Blob account name + +
+
+ +
+
+ +**container:** `typing.Optional[str]` — Azure blob container + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Storage description + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — Azure blob prefix name + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` — Presign URLs for direct download + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it; otherwise, no objects will be imported. + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Storage title + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.azure.sync(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Sync tasks from an Azure import storage connection. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure.sync( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` — Storage ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## ImportStorage AzureSpi +
client.import_storage.azure_spi.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a list of all Azure import storage connections that were set up with Service Principal authentication. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure_spi.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.azure_spi.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create an Azure import storage connection with Service Principal authentication. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure_spi.create( + project=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**account_name:** `typing.Optional[str]` — Azure Blob account name + +
+
+ +
+
+ +**client_id:** `typing.Optional[str]` — Azure Blob Service Principal Client ID + +
+
+ +
+
+ +**client_secret:** `typing.Optional[str]` — Azure Blob Service Principal Client Secret + +
+
+ +
+
+ +**container:** `typing.Optional[str]` — Azure blob container + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Cloud storage description + +
+
+ +
+
+ +**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time + +
+
+ +
+
+ +**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time + +
+
+ +
+
+ +**last_sync_job:** `typing.Optional[str]` — Last sync job ID + +
+
+ +
+
+ +**meta:** `typing.Optional[typing.Optional[typing.Any]]` + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — Azure blob prefix name + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presigned URLs TTL (in minutes) + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects + +
+
+ +
+
+ +**status:** `typing.Optional[StatusC5AEnum]` + +
+
+ +
+
+ +**synchronizable:** `typing.Optional[bool]` + +
+
+ +
+
+ +**tenant_id:** `typing.Optional[str]` — Azure Tenant ID + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Cloud storage title + +
+
+ +
+
+ +**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs + +
+
+ +
+
+ +**user_delegation_key:** `typing.Optional[str]` — User Delegation Key (Backend) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
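Besides the required project, a Service Principal connection normally needs the tenant, client ID, and client secret documented above; the following is an illustrative sketch with placeholder identifiers.

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Placeholder Service Principal credentials - use the app registration you created in Azure AD.
client.import_storage.azure_spi.create(
    project=1,
    title="Azure SPI imports",
    account_name="mystorageaccount",
    container="tasks",
    prefix="raw/",
    tenant_id="YOUR_TENANT_ID",
    client_id="YOUR_CLIENT_ID",
    client_secret="YOUR_CLIENT_SECRET",
    regex_filter=".*\\.json$",
    use_blob_urls=True,
)
```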
client.import_storage.azure_spi.validate(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Validate a specific Azure import storage connection that was set up with Service Principal authentication. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure_spi.validate( + project=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**project:** `int` — A unique integer value identifying this project. + +
+
+ +
+
+ +**account_name:** `typing.Optional[str]` — Azure Blob account name + +
+
+ +
+
+ +**client_id:** `typing.Optional[str]` — Azure Blob Service Principal Client ID + +
+
+ +
+
+ +**client_secret:** `typing.Optional[str]` — Azure Blob Service Principal Client Secret + +
+
+ +
+
+ +**container:** `typing.Optional[str]` — Azure blob container + +
+
+ +
+
+ +**description:** `typing.Optional[str]` — Cloud storage description + +
+
+ +
+
+ +**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time + +
+
+ +
+
+ +**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time + +
+
+ +
+
+ +**last_sync_job:** `typing.Optional[str]` — Last sync job ID + +
+
+ +
+
+ +**meta:** `typing.Optional[typing.Optional[typing.Any]]` + +
+
+ +
+
+ +**prefix:** `typing.Optional[str]` — Azure blob prefix name + +
+
+ +
+
+ +**presign:** `typing.Optional[bool]` + +
+
+ +
+
+ +**presign_ttl:** `typing.Optional[int]` — Presigned URLs TTL (in minutes) + +
+
+ +
+
+ +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects + +
+
+ +
+
+ +**status:** `typing.Optional[StatusC5AEnum]` + +
+
+ +
+
+ +**synchronizable:** `typing.Optional[bool]` + +
+
+ +
+
+ +**tenant_id:** `typing.Optional[str]` — Azure Tenant ID + +
+
+ +
+
+ +**title:** `typing.Optional[str]` — Cloud storage title + +
+
+ +
+
+ +**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync + +
+
+ +
+
+ +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs + +
+
+ +
+
+ +**user_delegation_key:** `typing.Optional[str]` — User Delegation Key (Backend) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.azure_spi.get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific Azure import storage connection that was set up with Service Principal authentication. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure_spi.get( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.azure_spi.delete(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a specific Azure import storage connection that was set up with Service Principal authentication. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.azure_spi.delete( + id=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.import_storage.azure_spi.update(...)
@@ -15039,7 +19138,7 @@ client.export_storage.s3s.create(
-Validate a specific S3 export storage connection that was set up with IAM role access. +Update a specific Azure import storage connection that was set up with Service Principal authentication.
@@ -15059,9 +19158,8 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.s3s.validate( - project=1, - role_arn="role_arn", +client.import_storage.azure_spi.update( + id=1, ) ``` @@ -15078,23 +19176,7 @@ client.export_storage.s3s.validate(
-**project:** `int` — A unique integer value identifying this project. - -
-
- -
-
- -**role_arn:** `str` — AWS RoleArn - -
-
- -
-
- -**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID +**id:** `int`
@@ -15102,7 +19184,7 @@ client.export_storage.s3s.validate(
-**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY +**account_name:** `typing.Optional[str]` — Azure Blob account name
@@ -15110,7 +19192,7 @@ client.export_storage.s3s.validate(
-**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN +**client_id:** `typing.Optional[str]` — Azure Blob Service Principal Client ID
@@ -15118,7 +19200,7 @@ client.export_storage.s3s.validate(
-**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID +**client_secret:** `typing.Optional[str]` — Azure Blob Service Principal Client Secret
@@ -15126,7 +19208,7 @@ client.export_storage.s3s.validate(
-**bucket:** `typing.Optional[str]` — S3 bucket name +**container:** `typing.Optional[str]` — Azure blob container
@@ -15134,7 +19216,7 @@ client.export_storage.s3s.validate(
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled +**description:** `typing.Optional[str]` — Cloud storage description
@@ -15142,7 +19224,7 @@ client.export_storage.s3s.validate(
-**description:** `typing.Optional[str]` — Cloud storage description +**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time
@@ -15150,7 +19232,7 @@ client.export_storage.s3s.validate(
-**external_id:** `typing.Optional[str]` — AWS ExternalId +**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time
@@ -15158,7 +19240,7 @@ client.export_storage.s3s.validate(
-**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time +**last_sync_job:** `typing.Optional[str]` — Last sync job ID
@@ -15166,7 +19248,7 @@ client.export_storage.s3s.validate(
-**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time +**meta:** `typing.Optional[typing.Optional[typing.Any]]`
@@ -15174,7 +19256,7 @@ client.export_storage.s3s.validate(
-**last_sync_job:** `typing.Optional[str]` — Last sync job ID +**prefix:** `typing.Optional[str]` — Azure blob prefix name
@@ -15182,7 +19264,7 @@ client.export_storage.s3s.validate(
-**legacy_auth:** `typing.Optional[bool]` +**presign:** `typing.Optional[bool]`
@@ -15190,7 +19272,7 @@ client.export_storage.s3s.validate(
-**meta:** `typing.Optional[typing.Optional[typing.Any]]` +**presign_ttl:** `typing.Optional[int]` — Presigned URLs TTL (in minutes)
@@ -15198,7 +19280,7 @@ client.export_storage.s3s.validate(
-**prefix:** `typing.Optional[str]` — S3 bucket prefix +**project:** `typing.Optional[int]` — A unique integer value identifying this project.
@@ -15214,7 +19296,7 @@ client.export_storage.s3s.validate(
-**region_name:** `typing.Optional[str]` — AWS Region +**status:** `typing.Optional[StatusC5AEnum]`
@@ -15222,7 +19304,7 @@ client.export_storage.s3s.validate(
-**s3endpoint:** `typing.Optional[str]` — S3 Endpoint +**synchronizable:** `typing.Optional[bool]`
@@ -15230,7 +19312,7 @@ client.export_storage.s3s.validate(
-**status:** `typing.Optional[StatusC5AEnum]` +**tenant_id:** `typing.Optional[str]` — Azure Tenant ID
@@ -15238,7 +19320,7 @@ client.export_storage.s3s.validate(
-**synchronizable:** `typing.Optional[bool]` +**title:** `typing.Optional[str]` — Cloud storage title
@@ -15246,7 +19328,7 @@ client.export_storage.s3s.validate(
-**title:** `typing.Optional[str]` — Cloud storage title +**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync
@@ -15254,7 +19336,7 @@ client.export_storage.s3s.validate(
-**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs
@@ -15262,7 +19344,7 @@ client.export_storage.s3s.validate(
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs +**user_delegation_key:** `typing.Optional[str]` — User Delegation Key (Backend)
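The minimal usage snippet above only passes `id`; a slightly fuller sketch, using nothing but parameters documented for this method and placeholder credential values, might look like this when rotating the Service Principal secret on an existing connection:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Rotate the Service Principal credentials on connection 1 and narrow the
# import prefix. All values are placeholders; every keyword here is optional,
# so pass only the fields you want to change.
client.import_storage.azure_spi.update(
    id=1,
    client_id="YOUR_CLIENT_ID",
    client_secret="YOUR_NEW_CLIENT_SECRET",
    tenant_id="YOUR_TENANT_ID",
    prefix="images/2024/",
)
```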
@@ -15282,7 +19364,7 @@ client.export_storage.s3s.validate(
-
client.export_storage.s3s.get(...) +
client.import_storage.azure_spi.sync(...)
@@ -15294,7 +19376,7 @@ client.export_storage.s3s.validate(
-Get a specific S3 export storage connection that was set up with IAM role access. +Sync tasks from an Azure import storage connection that was set up with Service Principal authentication.
@@ -15314,7 +19396,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.s3s.get( +client.import_storage.azure_spi.sync( id=1, ) @@ -15352,7 +19434,8 @@ client.export_storage.s3s.get(
-
client.export_storage.s3s.delete(...) +## ImportStorage Gcs +
client.import_storage.gcs.list(...)
@@ -15364,7 +19447,7 @@ client.export_storage.s3s.get(
-Delete a specific S3 export storage connection that was set up with IAM role access. +Get a list of all GCS import storage connections.
@@ -15384,9 +19467,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.s3s.delete( - id=1, -) +client.import_storage.gcs.list() ``` @@ -15402,7 +19483,15 @@ client.export_storage.s3s.delete(
-**id:** `int` +**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID
@@ -15422,7 +19511,7 @@ client.export_storage.s3s.delete(
-
client.export_storage.s3s.update(...) +
client.import_storage.gcs.create(...)
@@ -15434,7 +19523,7 @@ client.export_storage.s3s.delete(
-Update a specific S3 export storage connection that was set up with IAM role access. +Create a new GCS import storage connection.
@@ -15454,9 +19543,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.export_storage.s3s.update( - id=1, -) +client.import_storage.gcs.create() ``` @@ -15472,7 +19559,7 @@ client.export_storage.s3s.update(
-**id:** `int` +**bucket:** `typing.Optional[str]` — GCS bucket name
@@ -15480,7 +19567,7 @@ client.export_storage.s3s.update(
-**aws_access_key_id:** `typing.Optional[str]` — AWS_ACCESS_KEY_ID +**description:** `typing.Optional[str]` — Storage description
@@ -15488,7 +19575,7 @@ client.export_storage.s3s.update(
-**aws_secret_access_key:** `typing.Optional[str]` — AWS_SECRET_ACCESS_KEY +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details.
@@ -15496,7 +19583,7 @@ client.export_storage.s3s.update(
-**aws_session_token:** `typing.Optional[str]` — AWS_SESSION_TOKEN +**google_project_id:** `typing.Optional[str]` — Google project ID
@@ -15504,7 +19591,7 @@ client.export_storage.s3s.update(
-**aws_sse_kms_key_id:** `typing.Optional[str]` — AWS SSE KMS Key ID +**prefix:** `typing.Optional[str]` — GCS bucket prefix
@@ -15512,7 +19599,7 @@ client.export_storage.s3s.update(
-**bucket:** `typing.Optional[str]` — S3 bucket name +**presign:** `typing.Optional[bool]` — Presign URLs for direct download
@@ -15520,7 +19607,7 @@ client.export_storage.s3s.update(
-**can_delete_objects:** `typing.Optional[bool]` — Deletion from storage enabled +**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes
@@ -15528,7 +19615,7 @@ client.export_storage.s3s.update(
-**description:** `typing.Optional[str]` — Cloud storage description +**project:** `typing.Optional[int]` — Project ID
@@ -15536,7 +19623,7 @@ client.export_storage.s3s.update(
-**external_id:** `typing.Optional[str]` — AWS ExternalId +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported.
@@ -15544,7 +19631,7 @@ client.export_storage.s3s.update(
-**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time +**title:** `typing.Optional[str]` — Storage title
@@ -15552,7 +19639,7 @@ client.export_storage.s3s.update(
-**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
@@ -15560,111 +19647,67 @@ client.export_storage.s3s.update(
-**last_sync_job:** `typing.Optional[str]` — Last sync job ID +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
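The generated example above calls `client.import_storage.gcs.create()` with no arguments; in practice you would usually pass at least the project and bucket. A hedged sketch with placeholder values, using only the parameters documented above:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# All values are placeholders. Note the regex_filter: per the parameter
# description above, no objects are imported unless it is specified.
client.import_storage.gcs.create(
    project=1,
    bucket="my-bucket",
    prefix="tasks/",
    regex_filter=r".*\.json",
    use_blob_urls=False,
    google_application_credentials="<contents of the service account JSON file>",
    title="GCS tasks",
)
```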
- -
-
- -**legacy_auth:** `typing.Optional[bool]` -
-
-
-**meta:** `typing.Optional[typing.Optional[typing.Any]]` -
+
+
client.import_storage.gcs.validate(...)
-**prefix:** `typing.Optional[str]` — S3 bucket prefix - -
-
+#### 📝 Description
-**project:** `typing.Optional[int]` — A unique integer value identifying this project. - -
-
-
-**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects - +Validate a specific GCS import storage connection.
- -
-
- -**region_name:** `typing.Optional[str]` — AWS Region -
-
-
- -**role_arn:** `typing.Optional[str]` — AWS RoleArn - -
-
+#### 🔌 Usage
-**s3endpoint:** `typing.Optional[str]` — S3 Endpoint - -
-
-
-**status:** `typing.Optional[StatusC5AEnum]` - -
-
+```python +from label_studio_sdk import LabelStudio -
-
+client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.gcs.validate() -**synchronizable:** `typing.Optional[bool]` - +```
- -
-
- -**title:** `typing.Optional[str]` — Cloud storage title -
+#### ⚙️ Parameters +
-**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync - -
-
-
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs +**bucket:** `typing.Optional[str]` — GCS bucket name
@@ -15672,69 +19715,31 @@ client.export_storage.s3s.update(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**description:** `typing.Optional[str]` — Storage description
- - - - - - -
- -
client.export_storage.s3s.sync(...) -
-
- -#### 📝 Description - -
-
-Sync tasks from an S3 export storage. -
-
+**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. +
-#### 🔌 Usage -
-
-
- -```python -from label_studio_sdk import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.export_storage.s3s.sync( - id=1, -) - -``` -
-
+**google_project_id:** `typing.Optional[str]` — Google project ID +
-#### ⚙️ Parameters -
-
-
- -**id:** `int` +**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated
@@ -15742,68 +19747,47 @@ client.export_storage.s3s.sync(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**prefix:** `typing.Optional[str]` — GCS bucket prefix
-
-
- - -
-
-
- -## ImportStorage Azure -
client.import_storage.azure.list(...) -
-
- -#### 📝 Description - -
-
-Get list of all Azure import storage connections. -
-
+**presign:** `typing.Optional[bool]` — Presign URLs for direct download +
-#### 🔌 Usage -
+**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes + +
+
+
-```python -from label_studio_sdk import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.import_storage.azure.list() - -``` -
-
+**project:** `typing.Optional[int]` — Project ID +
-#### ⚙️ Parameters -
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. + +
+
+
-**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. +**title:** `typing.Optional[str]` — Storage title
@@ -15811,7 +19795,7 @@ client.import_storage.azure.list()
-**project:** `typing.Optional[int]` — Project ID +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio.
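As with `create`, the bare `client.import_storage.gcs.validate()` call above is the minimal form; a hedged sketch that actually exercises the credential check, with placeholder values and only parameters listed above:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Placeholder values; pass id=... instead to re-validate an existing
# connection, as described for the id parameter above.
client.import_storage.gcs.validate(
    project=1,
    bucket="my-bucket",
    google_application_credentials="<contents of the service account JSON file>",
)
```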
@@ -15831,7 +19815,7 @@ client.import_storage.azure.list()
-
client.import_storage.azure.create(...) +
client.import_storage.gcs.get(...)
@@ -15843,7 +19827,7 @@ client.import_storage.azure.list()
-Create new Azure import storage +Get a specific GCS import storage connection.
@@ -15863,7 +19847,9 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.azure.create() +client.import_storage.gcs.get( + id=1, +) ``` @@ -15879,7 +19865,7 @@ client.import_storage.azure.create()
-**account_key:** `typing.Optional[str]` — Azure Blob account key +**id:** `int`
@@ -15887,79 +19873,69 @@ client.import_storage.azure.create()
-**account_name:** `typing.Optional[str]` — Azure Blob account name +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
- -
-
- -**container:** `typing.Optional[str]` — Azure blob container -
-
-
-**description:** `typing.Optional[str]` — Storage description -
+
+
client.import_storage.gcs.delete(...)
-**prefix:** `typing.Optional[str]` — Azure blob prefix name - -
-
+#### 📝 Description
-**presign:** `typing.Optional[bool]` — Presign URLs for direct download - -
-
-
-**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes - +Delete a specific GCS import storage connection. +
+
+#### 🔌 Usage +
-**project:** `typing.Optional[int]` — Project ID - -
-
-
-**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.gcs.delete( + id=1, +) + +``` +
+
+#### ⚙️ Parameters +
-**title:** `typing.Optional[str]` — Storage title - -
-
-
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. +**id:** `int`
@@ -15979,7 +19955,7 @@ client.import_storage.azure.create()
-
client.import_storage.azure.validate(...) +
client.import_storage.gcs.update(...)
@@ -15991,7 +19967,7 @@ client.import_storage.azure.create()
-Validate a specific Azure import storage connection. +Update a specific GCS import storage connection.
@@ -16011,7 +19987,9 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.azure.validate() +client.import_storage.gcs.update( + id=1, +) ``` @@ -16027,7 +20005,7 @@ client.import_storage.azure.validate()
-**account_key:** `typing.Optional[str]` — Azure Blob account key +**id:** `int`
@@ -16035,7 +20013,7 @@ client.import_storage.azure.validate()
-**account_name:** `typing.Optional[str]` — Azure Blob account name +**bucket:** `typing.Optional[str]` — GCS bucket name
@@ -16043,7 +20021,7 @@ client.import_storage.azure.validate()
-**container:** `typing.Optional[str]` — Azure blob container +**description:** `typing.Optional[str]` — Storage description
@@ -16051,7 +20029,7 @@ client.import_storage.azure.validate()
-**description:** `typing.Optional[str]` — Storage description +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details.
@@ -16059,7 +20037,7 @@ client.import_storage.azure.validate()
-**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated +**google_project_id:** `typing.Optional[str]` — Google project ID
@@ -16067,7 +20045,7 @@ client.import_storage.azure.validate()
-**prefix:** `typing.Optional[str]` — Azure blob prefix name +**prefix:** `typing.Optional[str]` — GCS bucket prefix
@@ -16135,7 +20113,7 @@ client.import_storage.azure.validate()
-
client.import_storage.azure.get(...) +
client.import_storage.gcs.sync(...)
@@ -16147,7 +20125,7 @@ client.import_storage.azure.validate()
-Get a specific Azure import storage connection. +Sync tasks from a GCS import storage connection.
@@ -16167,7 +20145,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.azure.get( +client.import_storage.gcs.sync( id=1, ) @@ -16185,7 +20163,7 @@ client.import_storage.azure.get(
-**id:** `int` +**id:** `int` — Storage ID
@@ -16205,7 +20183,8 @@ client.import_storage.azure.get(
-
client.import_storage.azure.delete(...) +## ImportStorage Gcswif +
client.import_storage.gcswif.list(...)
@@ -16217,7 +20196,7 @@ client.import_storage.azure.get(
-Delete a specific Azure import storage connection. +Get a list of all GCS import storage connections that were set up with WIF authentication.
@@ -16237,9 +20216,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.azure.delete( - id=1, -) +client.import_storage.gcswif.list() ``` @@ -16255,7 +20232,15 @@ client.import_storage.azure.delete(
-**id:** `int` +**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. + +
+
+ +
+
+ +**project:** `typing.Optional[int]` — Project ID
@@ -16275,7 +20260,7 @@ client.import_storage.azure.delete(
-
client.import_storage.azure.update(...) +
client.import_storage.gcswif.create(...)
@@ -16287,7 +20272,7 @@ client.import_storage.azure.delete(
-Update a specific Azure import storage connection. +Create a GCS import storage connection with WIF authentication.
@@ -16307,8 +20292,8 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.azure.update( - id=1, +client.import_storage.gcswif.create( + project=1, ) ``` @@ -16325,7 +20310,7 @@ client.import_storage.azure.update(
-**id:** `int` +**project:** `int` — A unique integer value identifying this project.
@@ -16333,7 +20318,7 @@ client.import_storage.azure.update(
-**account_key:** `typing.Optional[str]` — Azure Blob account key +**bucket:** `typing.Optional[str]` — GCS bucket name
@@ -16341,7 +20326,7 @@ client.import_storage.azure.update(
-**account_name:** `typing.Optional[str]` — Azure Blob account name +**description:** `typing.Optional[str]` — Cloud storage description
@@ -16349,7 +20334,7 @@ client.import_storage.azure.update(
-**container:** `typing.Optional[str]` — Azure blob container +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file
@@ -16357,7 +20342,7 @@ client.import_storage.azure.update(
-**description:** `typing.Optional[str]` — Storage description +**google_project_id:** `typing.Optional[str]` — Google project ID
@@ -16365,7 +20350,7 @@ client.import_storage.azure.update(
-**prefix:** `typing.Optional[str]` — Azure blob prefix name +**google_project_number:** `typing.Optional[str]` — Google project number
@@ -16373,7 +20358,7 @@ client.import_storage.azure.update(
-**presign:** `typing.Optional[bool]` — Presign URLs for direct download +**google_service_account_email:** `typing.Optional[str]` — Google service account email
@@ -16381,7 +20366,7 @@ client.import_storage.azure.update(
-**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes +**google_wif_pool_id:** `typing.Optional[str]` — Google WIF pool ID
@@ -16389,7 +20374,7 @@ client.import_storage.azure.update(
-**project:** `typing.Optional[int]` — Project ID +**google_wif_provider_id:** `typing.Optional[str]` — Google WIF provider ID
@@ -16397,7 +20382,7 @@ client.import_storage.azure.update(
-**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. +**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time
@@ -16405,7 +20390,7 @@ client.import_storage.azure.update(
-**title:** `typing.Optional[str]` — Storage title +**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time
@@ -16413,7 +20398,7 @@ client.import_storage.azure.update(
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. +**last_sync_job:** `typing.Optional[str]` — Last sync job ID
@@ -16421,69 +20406,15 @@ client.import_storage.azure.update(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**meta:** `typing.Optional[typing.Optional[typing.Any]]`
- -
- - - - -
- -
client.import_storage.azure.sync(...) -
-
- -#### 📝 Description - -
-
- -
-
- -Sync tasks from an Azure import storage connection. -
-
-
-
- -#### 🔌 Usage - -
-
- -
-
- -```python -from label_studio_sdk import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.import_storage.azure.sync( - id=1, -) - -``` -
-
-
-
- -#### ⚙️ Parameters - -
-
-**id:** `int` — Storage ID +**prefix:** `typing.Optional[str]` — GCS bucket prefix
@@ -16491,68 +20422,55 @@ client.import_storage.azure.sync(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**presign:** `typing.Optional[bool]`
-
-
- - -
-
-
- -## ImportStorage Gcs -
client.import_storage.gcs.list(...) -
-
- -#### 📝 Description
-
-
- -Get a list of all GCS import storage connections. -
-
+**presign_ttl:** `typing.Optional[int]` — Presigned URLs TTL (in minutes) +
-#### 🔌 Usage -
+**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects + +
+
+
-```python -from label_studio_sdk import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.import_storage.gcs.list() - -``` +**status:** `typing.Optional[StatusC5AEnum]` +
+ +
+
+ +**synchronizable:** `typing.Optional[bool]` +
-#### ⚙️ Parameters -
+**title:** `typing.Optional[str]` — Cloud storage title + +
+
+
-**ordering:** `typing.Optional[str]` — Which field to use when ordering the results. +**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync
@@ -16560,7 +20478,7 @@ client.import_storage.gcs.list()
-**project:** `typing.Optional[int]` — Project ID +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs
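The usage snippet for `gcswif.create` above passes only `project=1`; a hedged sketch of a fuller call, with placeholder identifiers and only the WIF parameters documented above:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Every identifier below is a placeholder for your own GCP / WIF setup.
client.import_storage.gcswif.create(
    project=1,
    bucket="my-bucket",
    google_project_id="my-gcp-project",
    google_project_number="123456789012",
    google_service_account_email="labeling@my-gcp-project.iam.gserviceaccount.com",
    google_wif_pool_id="my-workload-pool",
    google_wif_provider_id="my-provider",
    regex_filter=r".*\.json",
)
```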
@@ -16580,7 +20498,7 @@ client.import_storage.gcs.list()
-
client.import_storage.gcs.create(...) +
client.import_storage.gcswif.validate(...)
@@ -16592,7 +20510,7 @@ client.import_storage.gcs.list()
-Create a new GCS import storage connection. +Validate a specific GCS import storage connection that was set up with WIF authentication.
@@ -16612,7 +20530,9 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.gcs.create() +client.import_storage.gcswif.validate( + project=1, +) ``` @@ -16628,7 +20548,7 @@ client.import_storage.gcs.create()
-**bucket:** `typing.Optional[str]` — GCS bucket name +**project:** `int` — A unique integer value identifying this project.
@@ -16636,7 +20556,7 @@ client.import_storage.gcs.create()
-**description:** `typing.Optional[str]` — Storage description +**bucket:** `typing.Optional[str]` — GCS bucket name
@@ -16644,7 +20564,7 @@ client.import_storage.gcs.create()
-**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. +**description:** `typing.Optional[str]` — Cloud storage description
@@ -16652,7 +20572,7 @@ client.import_storage.gcs.create()
-**google_project_id:** `typing.Optional[str]` — Google project ID +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file
@@ -16660,7 +20580,7 @@ client.import_storage.gcs.create()
-**prefix:** `typing.Optional[str]` — GCS bucket prefix +**google_project_id:** `typing.Optional[str]` — Google project ID
@@ -16668,7 +20588,7 @@ client.import_storage.gcs.create()
-**presign:** `typing.Optional[bool]` — Presign URLs for direct download +**google_project_number:** `typing.Optional[str]` — Google project number
@@ -16676,7 +20596,7 @@ client.import_storage.gcs.create()
-**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes +**google_service_account_email:** `typing.Optional[str]` — Google service account email
@@ -16684,7 +20604,7 @@ client.import_storage.gcs.create()
-**project:** `typing.Optional[int]` — Project ID +**google_wif_pool_id:** `typing.Optional[str]` — Google WIF pool ID
@@ -16692,7 +20612,7 @@ client.import_storage.gcs.create()
-**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. +**google_wif_provider_id:** `typing.Optional[str]` — Google WIF provider ID
@@ -16700,7 +20620,7 @@ client.import_storage.gcs.create()
-**title:** `typing.Optional[str]` — Storage title +**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time
@@ -16708,7 +20628,7 @@ client.import_storage.gcs.create()
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. +**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time
@@ -16716,67 +20636,47 @@ client.import_storage.gcs.create()
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**last_sync_job:** `typing.Optional[str]` — Last sync job ID
- -
+
+
+**meta:** `typing.Optional[typing.Optional[typing.Any]]` +
-
-
client.import_storage.gcs.validate(...)
-#### 📝 Description - -
-
+**prefix:** `typing.Optional[str]` — GCS bucket prefix + +
+
-Validate a specific GCS import storage connection. -
-
+**presign:** `typing.Optional[bool]` +
-#### 🔌 Usage - -
-
-
-```python -from label_studio_sdk import LabelStudio - -client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.import_storage.gcs.validate() - -``` -
-
+**presign_ttl:** `typing.Optional[int]` — Presigned URLs TTL (in minutes) +
-#### ⚙️ Parameters - -
-
-
-**bucket:** `typing.Optional[str]` — GCS bucket name +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects
@@ -16784,7 +20684,7 @@ client.import_storage.gcs.validate()
-**description:** `typing.Optional[str]` — Storage description +**status:** `typing.Optional[StatusC5AEnum]`
@@ -16792,7 +20692,7 @@ client.import_storage.gcs.validate()
-**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. +**synchronizable:** `typing.Optional[bool]`
@@ -16800,7 +20700,7 @@ client.import_storage.gcs.validate()
-**google_project_id:** `typing.Optional[str]` — Google project ID +**title:** `typing.Optional[str]` — Cloud storage title
@@ -16808,7 +20708,7 @@ client.import_storage.gcs.validate()
-**id:** `typing.Optional[int]` — Storage ID. If set, storage with specified ID will be updated +**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync
@@ -16816,7 +20716,7 @@ client.import_storage.gcs.validate()
-**prefix:** `typing.Optional[str]` — GCS bucket prefix +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs
@@ -16824,47 +20724,69 @@ client.import_storage.gcs.validate()
-**presign:** `typing.Optional[bool]` — Presign URLs for direct download +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
-
-
-**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes -
+
+
client.import_storage.gcswif.get(...)
-**project:** `typing.Optional[int]` — Project ID - -
-
+#### 📝 Description
-**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. - +
+
+ +Get a specific GCS import storage connection that was set up with WIF authentication. +
+
+#### 🔌 Usage +
-**title:** `typing.Optional[str]` — Storage title - +
+
+ +```python +from label_studio_sdk import LabelStudio + +client = LabelStudio( + api_key="YOUR_API_KEY", +) +client.import_storage.gcswif.get( + id=1, +) + +``` +
+
+#### ⚙️ Parameters +
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. +
+
+ +**id:** `int`
@@ -16884,7 +20806,7 @@ client.import_storage.gcs.validate()
-
client.import_storage.gcs.get(...) +
client.import_storage.gcswif.delete(...)
@@ -16896,7 +20818,7 @@ client.import_storage.gcs.validate()
-Get a specific GCS import storage connection. +Delete a specific GCS import storage connection that was set up with WIF authentication.
@@ -16916,7 +20838,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.gcs.get( +client.import_storage.gcswif.delete( id=1, ) @@ -16954,7 +20876,7 @@ client.import_storage.gcs.get(
-
client.import_storage.gcs.delete(...) +
client.import_storage.gcswif.update(...)
@@ -16966,7 +20888,7 @@ client.import_storage.gcs.get(
-Delete a specific GCS import storage connection. +Update a specific GCS import storage connection that was set up with WIF authentication.
@@ -16986,7 +20908,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.gcs.delete( +client.import_storage.gcswif.update( id=1, ) @@ -17012,69 +20934,87 @@ client.import_storage.gcs.delete(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**bucket:** `typing.Optional[str]` — GCS bucket name
- -
+
+
+**description:** `typing.Optional[str]` — Cloud storage description +
-
-
client.import_storage.gcs.update(...)
-#### 📝 Description +**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file + +
+
+**google_project_id:** `typing.Optional[str]` — Google project ID + +
+
+
-Update a specific GCS import storage connection. -
-
+**google_project_number:** `typing.Optional[str]` — Google project number + -#### 🔌 Usage -
+**google_service_account_email:** `typing.Optional[str]` — Google service account email + +
+
+
-```python -from label_studio_sdk import LabelStudio +**google_wif_pool_id:** `typing.Optional[str]` — Google WIF pool ID + +
+
-client = LabelStudio( - api_key="YOUR_API_KEY", -) -client.import_storage.gcs.update( - id=1, -) +
+
-``` +**google_wif_provider_id:** `typing.Optional[str]` — Google WIF provider ID +
+ +
+
+ +**last_sync:** `typing.Optional[dt.datetime]` — Last sync finished time +
-#### ⚙️ Parameters -
+**last_sync_count:** `typing.Optional[int]` — Count of tasks synced last time + +
+
+
-**id:** `int` +**last_sync_job:** `typing.Optional[str]` — Last sync job ID
@@ -17082,7 +21022,7 @@ client.import_storage.gcs.update(
-**bucket:** `typing.Optional[str]` — GCS bucket name +**meta:** `typing.Optional[typing.Optional[typing.Any]]`
@@ -17090,7 +21030,7 @@ client.import_storage.gcs.update(
-**description:** `typing.Optional[str]` — Storage description +**prefix:** `typing.Optional[str]` — GCS bucket prefix
@@ -17098,7 +21038,7 @@ client.import_storage.gcs.update(
-**google_application_credentials:** `typing.Optional[str]` — The content of GOOGLE_APPLICATION_CREDENTIALS json file. Check official Google Cloud Authentication documentation for more details. +**presign:** `typing.Optional[bool]`
@@ -17106,7 +21046,7 @@ client.import_storage.gcs.update(
-**google_project_id:** `typing.Optional[str]` — Google project ID +**presign_ttl:** `typing.Optional[int]` — Presigned URLs TTL (in minutes)
@@ -17114,7 +21054,7 @@ client.import_storage.gcs.update(
-**prefix:** `typing.Optional[str]` — GCS bucket prefix +**project:** `typing.Optional[int]` — A unique integer value identifying this project.
@@ -17122,7 +21062,7 @@ client.import_storage.gcs.update(
-**presign:** `typing.Optional[bool]` — Presign URLs for direct download +**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects
@@ -17130,7 +21070,7 @@ client.import_storage.gcs.update(
-**presign_ttl:** `typing.Optional[int]` — Presign TTL in minutes +**status:** `typing.Optional[StatusC5AEnum]`
@@ -17138,7 +21078,7 @@ client.import_storage.gcs.update(
-**project:** `typing.Optional[int]` — Project ID +**synchronizable:** `typing.Optional[bool]`
@@ -17146,7 +21086,7 @@ client.import_storage.gcs.update(
-**regex_filter:** `typing.Optional[str]` — Cloud storage regex for filtering objects. You must specify it otherwise no objects will be imported. +**title:** `typing.Optional[str]` — Cloud storage title
@@ -17154,7 +21094,7 @@ client.import_storage.gcs.update(
-**title:** `typing.Optional[str]` — Storage title +**traceback:** `typing.Optional[str]` — Traceback report for the last failed sync
@@ -17162,7 +21102,7 @@ client.import_storage.gcs.update(
-**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs. For example, if your bucket contains images, you can use this option to generate URLs for these images. If set to False, it will read the content of the file and load it into Label Studio. +**use_blob_urls:** `typing.Optional[bool]` — Interpret objects as BLOBs and generate URLs
@@ -17182,7 +21122,7 @@ client.import_storage.gcs.update(
-
client.import_storage.gcs.sync(...) +
client.import_storage.gcswif.sync(...)
@@ -17194,7 +21134,7 @@ client.import_storage.gcs.update(
-Sync tasks from a GCS import storage connection. +Sync tasks from a GCS import storage connection that was set up with WIF authentication.
@@ -17214,7 +21154,7 @@ from label_studio_sdk import LabelStudio client = LabelStudio( api_key="YOUR_API_KEY", ) -client.import_storage.gcs.sync( +client.import_storage.gcswif.sync( id=1, ) @@ -17232,7 +21172,7 @@ client.import_storage.gcs.sync(
-**id:** `int` — Storage ID +**id:** `int`
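One additional note on the sync endpoints: judging by the generated clients later in this diff, a sync call returns the storage record itself, so a caller could inspect the documented `status` and `last_sync_count` fields after triggering a sync. This is a sketch under that assumption, not a guaranteed contract:

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Assumption: sync() returns the storage record, whose attributes mirror the
# fields documented above (status, last_sync_count, ...).
storage = client.import_storage.gcswif.sync(id=1)
print(storage.status, storage.last_sync_count)
```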
diff --git a/src/label_studio_sdk/__init__.py b/src/label_studio_sdk/__init__.py index 756581b55..e2313ea8a 100644 --- a/src/label_studio_sdk/__init__.py +++ b/src/label_studio_sdk/__init__.py @@ -25,6 +25,10 @@ AuthMethodEnum, AzureBlobExportStorage, AzureBlobImportStorage, + AzureServicePrincipalExportStorage, + AzureServicePrincipalExportStorageRequest, + AzureServicePrincipalImportStorage, + AzureServicePrincipalImportStorageRequest, BatchFailedPredictions, BatchPredictions, BillingChecks, @@ -51,6 +55,10 @@ FinishedEnum, GcsExportStorage, GcsImportStorage, + GcswifExportStorage, + GcswifExportStorageRequest, + GcswifImportStorage, + GcswifImportStorageRequest, Hotkeys, ImportApiRequest, InferenceRunCostEstimate, @@ -358,6 +366,10 @@ "AuthMethodEnum", "AzureBlobExportStorage", "AzureBlobImportStorage", + "AzureServicePrincipalExportStorage", + "AzureServicePrincipalExportStorageRequest", + "AzureServicePrincipalImportStorage", + "AzureServicePrincipalImportStorageRequest", "BadRequestError", "BatchFailedPredictions", "BatchPredictions", @@ -388,6 +400,10 @@ "ForbiddenError", "GcsExportStorage", "GcsImportStorage", + "GcswifExportStorage", + "GcswifExportStorageRequest", + "GcswifImportStorage", + "GcswifImportStorageRequest", "Hotkeys", "ImportApiRequest", "ImportStorageListTypesResponseItem", diff --git a/src/label_studio_sdk/export_storage/__init__.py b/src/label_studio_sdk/export_storage/__init__.py index a006af5f2..0fbe3a0f2 100644 --- a/src/label_studio_sdk/export_storage/__init__.py +++ b/src/label_studio_sdk/export_storage/__init__.py @@ -1,6 +1,6 @@ # This file was auto-generated by Fern from our API Definition. from .types import ExportStorageListTypesResponseItem -from . import azure, gcs, local, redis, s3, s3s +from . import azure, azure_spi, gcs, gcswif, local, redis, s3, s3s -__all__ = ["ExportStorageListTypesResponseItem", "azure", "gcs", "local", "redis", "s3", "s3s"] +__all__ = ["ExportStorageListTypesResponseItem", "azure", "azure_spi", "gcs", "gcswif", "local", "redis", "s3", "s3s"] diff --git a/src/label_studio_sdk/export_storage/azure_spi/__init__.py b/src/label_studio_sdk/export_storage/azure_spi/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/src/label_studio_sdk/export_storage/azure_spi/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/src/label_studio_sdk/export_storage/azure_spi/client.py b/src/label_studio_sdk/export_storage/azure_spi/client.py new file mode 100644 index 000000000..f46af9625 --- /dev/null +++ b/src/label_studio_sdk/export_storage/azure_spi/client.py @@ -0,0 +1,1354 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from ...core.client_wrapper import SyncClientWrapper +from ...core.request_options import RequestOptions +from ...types.azure_service_principal_export_storage import AzureServicePrincipalExportStorage +from ...core.unchecked_base_model import construct_type +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError +import datetime as dt +from ...types.status_c5a_enum import StatusC5AEnum +from ...core.jsonable_encoder import jsonable_encoder +from ...core.client_wrapper import AsyncClientWrapper + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
+ + +class AzureSpiClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + *, + ordering: typing.Optional[str] = None, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.List[AzureServicePrincipalExportStorage]: + """ + Get a list of all Azure export storage connections that were set up with Service Principal authentication. + + Parameters + ---------- + ordering : typing.Optional[str] + Which field to use when ordering the results. + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[AzureServicePrincipalExportStorage] + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.azure_spi.list() + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/azure_spi", + method="GET", + params={ + "ordering": ordering, + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[AzureServicePrincipalExportStorage], + construct_type( + type_=typing.List[AzureServicePrincipalExportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def create( + self, + *, + project: int, + account_name: typing.Optional[str] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + client_id: typing.Optional[str] = OMIT, + client_secret: typing.Optional[str] = OMIT, + container: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + tenant_id: typing.Optional[str] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + user_delegation_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AzureServicePrincipalExportStorage: + """ + Create an Azure export storage connection with Service Principal authentication to store annotations. + + Parameters + ---------- + project : int + A unique integer value identifying this project. 
+ + account_name : typing.Optional[str] + Azure Blob account name + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + client_id : typing.Optional[str] + Azure Blob Service Principal Client ID + + client_secret : typing.Optional[str] + Azure Blob Service Principal Client Secret + + container : typing.Optional[str] + Azure blob container + + description : typing.Optional[str] + Cloud storage description + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + Azure blob prefix name + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + tenant_id : typing.Optional[str] + Azure Tenant ID + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + user_delegation_key : typing.Optional[str] + User Delegation Key (Backend) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AzureServicePrincipalExportStorage + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.azure_spi.create( + project=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/azure_spi", + method="POST", + json={ + "account_name": account_name, + "can_delete_objects": can_delete_objects, + "client_id": client_id, + "client_secret": client_secret, + "container": container, + "description": description, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "tenant_id": tenant_id, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + "user_delegation_key": user_delegation_key, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalExportStorage, + construct_type( + type_=AzureServicePrincipalExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def validate( + self, + *, + project: int, + account_name: typing.Optional[str] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + client_id: typing.Optional[str] = OMIT, + client_secret: typing.Optional[str] = OMIT, + container: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + tenant_id: typing.Optional[str] 
= OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + user_delegation_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> None: + """ + Validate a specific Azure export storage connection that was set up with Service Principal authentication. + + Parameters + ---------- + project : int + A unique integer value identifying this project. + + account_name : typing.Optional[str] + Azure Blob account name + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + client_id : typing.Optional[str] + Azure Blob Service Principal Client ID + + client_secret : typing.Optional[str] + Azure Blob Service Principal Client Secret + + container : typing.Optional[str] + Azure blob container + + description : typing.Optional[str] + Cloud storage description + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + Azure blob prefix name + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + tenant_id : typing.Optional[str] + Azure Tenant ID + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + user_delegation_key : typing.Optional[str] + User Delegation Key (Backend) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + None + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.azure_spi.validate( + project=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/azure_spi/validate", + method="POST", + json={ + "account_name": account_name, + "can_delete_objects": can_delete_objects, + "client_id": client_id, + "client_secret": client_secret, + "container": container, + "description": description, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "tenant_id": tenant_id, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + "user_delegation_key": user_delegation_key, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AzureServicePrincipalExportStorage: + """ + Get a specific Azure export storage connection that was set up with Service Principal authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AzureServicePrincipalExportStorage + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.azure_spi.get( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure_spi/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalExportStorage, + construct_type( + type_=AzureServicePrincipalExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: + """ + Delete a specific Azure export storage connection that was set up with Service Principal authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + None + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.azure_spi.delete( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure_spi/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def update( + self, + id: int, + *, + account_name: typing.Optional[str] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + client_id: typing.Optional[str] = OMIT, + client_secret: typing.Optional[str] = OMIT, + container: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + tenant_id: typing.Optional[str] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + user_delegation_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AzureServicePrincipalExportStorage: + """ + Update a specific Azure export storage connection that was set up with Service Principal authentication. 
+ + Parameters + ---------- + id : int + + account_name : typing.Optional[str] + Azure Blob account name + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + client_id : typing.Optional[str] + Azure Blob Service Principal Client ID + + client_secret : typing.Optional[str] + Azure Blob Service Principal Client Secret + + container : typing.Optional[str] + Azure blob container + + description : typing.Optional[str] + Cloud storage description + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + Azure blob prefix name + + project : typing.Optional[int] + A unique integer value identifying this project. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + tenant_id : typing.Optional[str] + Azure Tenant ID + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + user_delegation_key : typing.Optional[str] + User Delegation Key (Backend) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AzureServicePrincipalExportStorage + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.azure_spi.update( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure_spi/{jsonable_encoder(id)}", + method="PATCH", + json={ + "account_name": account_name, + "can_delete_objects": can_delete_objects, + "client_id": client_id, + "client_secret": client_secret, + "container": container, + "description": description, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "tenant_id": tenant_id, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + "user_delegation_key": user_delegation_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalExportStorage, + construct_type( + type_=AzureServicePrincipalExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AzureServicePrincipalExportStorage: + """ + Sync tasks from an Azure SPI export storage. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AzureServicePrincipalExportStorage + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.azure_spi.sync( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/azure_spi/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalExportStorage, + construct_type( + type_=AzureServicePrincipalExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncAzureSpiClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, + *, + ordering: typing.Optional[str] = None, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.List[AzureServicePrincipalExportStorage]: + """ + Get a list of all Azure export storage connections that were set up with Service Principal authentication. + + Parameters + ---------- + ordering : typing.Optional[str] + Which field to use when ordering the results. + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[AzureServicePrincipalExportStorage] + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.export_storage.azure_spi.list() + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/azure_spi", + method="GET", + params={ + "ordering": ordering, + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[AzureServicePrincipalExportStorage], + construct_type( + type_=typing.List[AzureServicePrincipalExportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def create( + self, + *, + project: int, + account_name: typing.Optional[str] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + client_id: typing.Optional[str] = OMIT, + client_secret: typing.Optional[str] = OMIT, + container: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + tenant_id: typing.Optional[str] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + user_delegation_key: typing.Optional[str] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> AzureServicePrincipalExportStorage: + """ + Create an Azure export storage connection with Service Principal authentication to store annotations. + + Parameters + ---------- + project : int + A unique integer value identifying this project. + + account_name : typing.Optional[str] + Azure Blob account name + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + client_id : typing.Optional[str] + Azure Blob Service Principal Client ID + + client_secret : typing.Optional[str] + Azure Blob Service Principal Client Secret + + container : typing.Optional[str] + Azure blob container + + description : typing.Optional[str] + Cloud storage description + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + Azure blob prefix name + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + tenant_id : typing.Optional[str] + Azure Tenant ID + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + user_delegation_key : typing.Optional[str] + User Delegation Key (Backend) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AzureServicePrincipalExportStorage + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.export_storage.azure_spi.create( + project=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/azure_spi", + method="POST", + json={ + "account_name": account_name, + "can_delete_objects": can_delete_objects, + "client_id": client_id, + "client_secret": client_secret, + "container": container, + "description": description, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "tenant_id": tenant_id, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + "user_delegation_key": user_delegation_key, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalExportStorage, + construct_type( + type_=AzureServicePrincipalExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def validate( + self, + *, + project: int, + account_name: typing.Optional[str] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + client_id: typing.Optional[str] = OMIT, + client_secret: typing.Optional[str] = OMIT, + container: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + last_sync: 
typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + tenant_id: typing.Optional[str] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + user_delegation_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> None: + """ + Validate a specific Azure export storage connection that was set up with Service Principal authentication. + + Parameters + ---------- + project : int + A unique integer value identifying this project. + + account_name : typing.Optional[str] + Azure Blob account name + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + client_id : typing.Optional[str] + Azure Blob Service Principal Client ID + + client_secret : typing.Optional[str] + Azure Blob Service Principal Client Secret + + container : typing.Optional[str] + Azure blob container + + description : typing.Optional[str] + Cloud storage description + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + Azure blob prefix name + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + tenant_id : typing.Optional[str] + Azure Tenant ID + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + user_delegation_key : typing.Optional[str] + User Delegation Key (Backend) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.export_storage.azure_spi.validate( + project=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/azure_spi/validate", + method="POST", + json={ + "account_name": account_name, + "can_delete_objects": can_delete_objects, + "client_id": client_id, + "client_secret": client_secret, + "container": container, + "description": description, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "tenant_id": tenant_id, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + "user_delegation_key": user_delegation_key, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AzureServicePrincipalExportStorage: + """ + Get a specific Azure export storage connection that was set up with Service Principal authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AzureServicePrincipalExportStorage + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.export_storage.azure_spi.get( + id=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure_spi/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalExportStorage, + construct_type( + type_=AzureServicePrincipalExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: + """ + Delete a specific Azure export storage connection that was set up with Service Principal authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.export_storage.azure_spi.delete( + id=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure_spi/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def update( + self, + id: int, + *, + account_name: typing.Optional[str] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + client_id: typing.Optional[str] = OMIT, + client_secret: typing.Optional[str] = OMIT, + container: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + tenant_id: typing.Optional[str] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + user_delegation_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AzureServicePrincipalExportStorage: + """ + Update a specific Azure export storage connection that was set up with Service Principal authentication. + + Parameters + ---------- + id : int + + account_name : typing.Optional[str] + Azure Blob account name + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + client_id : typing.Optional[str] + Azure Blob Service Principal Client ID + + client_secret : typing.Optional[str] + Azure Blob Service Principal Client Secret + + container : typing.Optional[str] + Azure blob container + + description : typing.Optional[str] + Cloud storage description + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + Azure blob prefix name + + project : typing.Optional[int] + A unique integer value identifying this project. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + tenant_id : typing.Optional[str] + Azure Tenant ID + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + user_delegation_key : typing.Optional[str] + User Delegation Key (Backend) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AzureServicePrincipalExportStorage + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.export_storage.azure_spi.update( + id=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure_spi/{jsonable_encoder(id)}", + method="PATCH", + json={ + "account_name": account_name, + "can_delete_objects": can_delete_objects, + "client_id": client_id, + "client_secret": client_secret, + "container": container, + "description": description, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "tenant_id": tenant_id, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + "user_delegation_key": user_delegation_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalExportStorage, + construct_type( + type_=AzureServicePrincipalExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AzureServicePrincipalExportStorage: + """ + Sync tasks from an Azure SPI export storage. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AzureServicePrincipalExportStorage + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.export_storage.azure_spi.sync( + id=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/azure_spi/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalExportStorage, + construct_type( + type_=AzureServicePrincipalExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/export_storage/client.py b/src/label_studio_sdk/export_storage/client.py index 63c3167ef..387ed54fa 100644 --- a/src/label_studio_sdk/export_storage/client.py +++ b/src/label_studio_sdk/export_storage/client.py @@ -2,7 +2,9 @@ from ..core.client_wrapper import SyncClientWrapper from .azure.client import AzureClient +from .azure_spi.client import AzureSpiClient from .gcs.client import GcsClient +from .gcswif.client import GcswifClient from .local.client import LocalClient from .redis.client import RedisClient from .s3.client import S3Client @@ -15,7 +17,9 @@ from ..core.api_error import ApiError from ..core.client_wrapper import AsyncClientWrapper from .azure.client import AsyncAzureClient +from .azure_spi.client import AsyncAzureSpiClient from .gcs.client import AsyncGcsClient +from .gcswif.client import AsyncGcswifClient from .local.client import AsyncLocalClient from .redis.client import AsyncRedisClient from .s3.client import AsyncS3Client @@ -26,7 +30,9 @@ class ExportStorageClient: def __init__(self, *, client_wrapper: SyncClientWrapper): self._client_wrapper = client_wrapper self.azure = AzureClient(client_wrapper=self._client_wrapper) + self.azure_spi = AzureSpiClient(client_wrapper=self._client_wrapper) self.gcs = GcsClient(client_wrapper=self._client_wrapper) + self.gcswif = GcswifClient(client_wrapper=self._client_wrapper) self.local = LocalClient(client_wrapper=self._client_wrapper) self.redis = RedisClient(client_wrapper=self._client_wrapper) self.s3 = S3Client(client_wrapper=self._client_wrapper) @@ -81,7 +87,9 @@ class AsyncExportStorageClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): self._client_wrapper = client_wrapper self.azure = AsyncAzureClient(client_wrapper=self._client_wrapper) + self.azure_spi = AsyncAzureSpiClient(client_wrapper=self._client_wrapper) self.gcs = AsyncGcsClient(client_wrapper=self._client_wrapper) + self.gcswif = AsyncGcswifClient(client_wrapper=self._client_wrapper) self.local = AsyncLocalClient(client_wrapper=self._client_wrapper) self.redis = AsyncRedisClient(client_wrapper=self._client_wrapper) self.s3 = AsyncS3Client(client_wrapper=self._client_wrapper) diff --git a/src/label_studio_sdk/export_storage/gcswif/__init__.py b/src/label_studio_sdk/export_storage/gcswif/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/src/label_studio_sdk/export_storage/gcswif/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. 
+ diff --git a/src/label_studio_sdk/export_storage/gcswif/client.py b/src/label_studio_sdk/export_storage/gcswif/client.py new file mode 100644 index 000000000..cb4a1d3b0 --- /dev/null +++ b/src/label_studio_sdk/export_storage/gcswif/client.py @@ -0,0 +1,1376 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from ...core.client_wrapper import SyncClientWrapper +from ...core.request_options import RequestOptions +from ...types.gcswif_export_storage import GcswifExportStorage +from ...core.unchecked_base_model import construct_type +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError +import datetime as dt +from ...types.status_c5a_enum import StatusC5AEnum +from ...core.jsonable_encoder import jsonable_encoder +from ...core.client_wrapper import AsyncClientWrapper + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class GcswifClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + *, + ordering: typing.Optional[str] = None, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.List[GcswifExportStorage]: + """ + Get a list of all GCS export storage connections that were set up with WIF authentication. + + Parameters + ---------- + ordering : typing.Optional[str] + Which field to use when ordering the results. + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[GcswifExportStorage] + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.gcswif.list() + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/gcswif", + method="GET", + params={ + "ordering": ordering, + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[GcswifExportStorage], + construct_type( + type_=typing.List[GcswifExportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def create( + self, + *, + project: int, + bucket: typing.Optional[str] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + description: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + google_project_number: typing.Optional[str] = OMIT, + google_service_account_email: typing.Optional[str] = OMIT, + google_wif_pool_id: typing.Optional[str] = OMIT, + google_wif_provider_id: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = 
None,
+ ) -> GcswifExportStorage:
+ """
+ Create a GCS export storage connection with WIF authentication to store annotations.
+
+ Parameters
+ ----------
+ project : int
+ A unique integer value identifying this project.
+
+ bucket : typing.Optional[str]
+ GCS bucket name
+
+ can_delete_objects : typing.Optional[bool]
+ Deletion from storage enabled
+
+ description : typing.Optional[str]
+ Cloud storage description
+
+ google_application_credentials : typing.Optional[str]
+ The content of GOOGLE_APPLICATION_CREDENTIALS json file
+
+ google_project_id : typing.Optional[str]
+ Google project ID
+
+ google_project_number : typing.Optional[str]
+ Google project number
+
+ google_service_account_email : typing.Optional[str]
+ Google service account email
+
+ google_wif_pool_id : typing.Optional[str]
+ Google WIF pool ID
+
+ google_wif_provider_id : typing.Optional[str]
+ Google WIF provider ID
+
+ last_sync : typing.Optional[dt.datetime]
+ Last sync finished time
+
+ last_sync_count : typing.Optional[int]
+ Count of tasks synced last time
+
+ last_sync_job : typing.Optional[str]
+ Last sync job ID
+
+ meta : typing.Optional[typing.Optional[typing.Any]]
+
+ prefix : typing.Optional[str]
+ GCS bucket prefix
+
+ regex_filter : typing.Optional[str]
+ Cloud storage regex for filtering objects
+
+ status : typing.Optional[StatusC5AEnum]
+
+ synchronizable : typing.Optional[bool]
+
+ title : typing.Optional[str]
+ Cloud storage title
+
+ traceback : typing.Optional[str]
+ Traceback report for the last failed sync
+
+ use_blob_urls : typing.Optional[bool]
+ Interpret objects as BLOBs and generate URLs
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ GcswifExportStorage
+
+
+ Examples
+ --------
+ from label_studio_sdk import LabelStudio
+
+ client = LabelStudio(
+ api_key="YOUR_API_KEY",
+ )
+ client.export_storage.gcswif.create(
+ project=1,
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ "api/storages/export/gcswif",
+ method="POST",
+ json={
+ "bucket": bucket,
+ "can_delete_objects": can_delete_objects,
+ "description": description,
+ "google_application_credentials": google_application_credentials,
+ "google_project_id": google_project_id,
+ "google_project_number": google_project_number,
+ "google_service_account_email": google_service_account_email,
+ "google_wif_pool_id": google_wif_pool_id,
+ "google_wif_provider_id": google_wif_provider_id,
+ "last_sync": last_sync,
+ "last_sync_count": last_sync_count,
+ "last_sync_job": last_sync_job,
+ "meta": meta,
+ "prefix": prefix,
+ "project": project,
+ "regex_filter": regex_filter,
+ "status": status,
+ "synchronizable": synchronizable,
+ "title": title,
+ "traceback": traceback,
+ "use_blob_urls": use_blob_urls,
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ GcswifExportStorage,
+ construct_type(
+ type_=GcswifExportStorage, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ def validate(
+ self,
+ *,
+ project: int,
+ bucket: typing.Optional[str] = OMIT,
+ can_delete_objects: typing.Optional[bool] = OMIT,
+ description: typing.Optional[str] = OMIT,
+ google_application_credentials: typing.Optional[str] = OMIT,
+ google_project_id: typing.Optional[str] = OMIT,
+
google_project_number: typing.Optional[str] = OMIT, + google_service_account_email: typing.Optional[str] = OMIT, + google_wif_pool_id: typing.Optional[str] = OMIT, + google_wif_provider_id: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> None: + """ + Validate a specific GCS export storage connection that was set up with WIF authentication. + + Parameters + ---------- + project : int + A unique integer value identifying this project. + + bucket : typing.Optional[str] + GCS bucket name + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + description : typing.Optional[str] + Cloud storage description + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file + + google_project_id : typing.Optional[str] + Google project ID + + google_project_number : typing.Optional[str] + Google project number + + google_service_account_email : typing.Optional[str] + Google service account email + + google_wif_pool_id : typing.Optional[str] + Google WIF pool ID + + google_wif_provider_id : typing.Optional[str] + Google WIF provider ID + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + GCS bucket prefix + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.gcswif.validate( + project=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/export/gcswif/validate", + method="POST", + json={ + "bucket": bucket, + "can_delete_objects": can_delete_objects, + "description": description, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + "google_project_number": google_project_number, + "google_service_account_email": google_service_account_email, + "google_wif_pool_id": google_wif_pool_id, + "google_wif_provider_id": google_wif_provider_id, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcswifExportStorage: + """ + Get a specific GCS export storage connection that was set up with WIF authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + GcswifExportStorage + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.gcswif.get( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/gcswif/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcswifExportStorage, + construct_type( + type_=GcswifExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: + """ + Delete a specific GCS export storage connection that was set up with WIF authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.export_storage.gcswif.delete( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/export/gcswif/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def update( + self, + id: int, + *, + bucket: typing.Optional[str] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + description: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + google_project_number: typing.Optional[str] = OMIT, + google_service_account_email: typing.Optional[str] = OMIT, + google_wif_pool_id: typing.Optional[str] = OMIT, + google_wif_provider_id: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> GcswifExportStorage: + """ + Update a specific GCS export storage connection that was set up with WIF authentication. + + Parameters + ---------- + id : int + + bucket : typing.Optional[str] + GCS bucket name + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + description : typing.Optional[str] + Cloud storage description + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file + + google_project_id : typing.Optional[str] + Google project ID + + google_project_number : typing.Optional[str] + Google project number + + google_service_account_email : typing.Optional[str] + Google service account email + + google_wif_pool_id : typing.Optional[str] + Google WIF pool ID + + google_wif_provider_id : typing.Optional[str] + Google WIF provider ID + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + GCS bucket prefix + + project : typing.Optional[int] + A unique integer value identifying this project. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+
+ Returns
+ -------
+ GcswifExportStorage
+
+
+ Examples
+ --------
+ from label_studio_sdk import LabelStudio
+
+ client = LabelStudio(
+ api_key="YOUR_API_KEY",
+ )
+ client.export_storage.gcswif.update(
+ id=1,
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ f"api/storages/export/gcswif/{jsonable_encoder(id)}",
+ method="PATCH",
+ json={
+ "bucket": bucket,
+ "can_delete_objects": can_delete_objects,
+ "description": description,
+ "google_application_credentials": google_application_credentials,
+ "google_project_id": google_project_id,
+ "google_project_number": google_project_number,
+ "google_service_account_email": google_service_account_email,
+ "google_wif_pool_id": google_wif_pool_id,
+ "google_wif_provider_id": google_wif_provider_id,
+ "last_sync": last_sync,
+ "last_sync_count": last_sync_count,
+ "last_sync_job": last_sync_job,
+ "meta": meta,
+ "prefix": prefix,
+ "project": project,
+ "regex_filter": regex_filter,
+ "status": status,
+ "synchronizable": synchronizable,
+ "title": title,
+ "traceback": traceback,
+ "use_blob_urls": use_blob_urls,
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ GcswifExportStorage,
+ construct_type(
+ type_=GcswifExportStorage, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcswifExportStorage:
+ """
+ Sync tasks from a GCS WIF export storage.
+
+ Parameters
+ ----------
+ id : int
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ GcswifExportStorage
+
+
+ Examples
+ --------
+ from label_studio_sdk import LabelStudio
+
+ client = LabelStudio(
+ api_key="YOUR_API_KEY",
+ )
+ client.export_storage.gcswif.sync(
+ id=1,
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ f"api/storages/export/gcswif/{jsonable_encoder(id)}/sync",
+ method="POST",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ GcswifExportStorage,
+ construct_type(
+ type_=GcswifExportStorage, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+
+class AsyncGcswifClient:
+ def __init__(self, *, client_wrapper: AsyncClientWrapper):
+ self._client_wrapper = client_wrapper
+
+ async def list(
+ self,
+ *,
+ ordering: typing.Optional[str] = None,
+ project: typing.Optional[int] = None,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> typing.List[GcswifExportStorage]:
+ """
+ Get a list of all GCS export storage connections that were set up with WIF authentication.
+
+ Parameters
+ ----------
+ ordering : typing.Optional[str]
+ Which field to use when ordering the results.
+
+ project : typing.Optional[int]
+ Project ID
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ typing.List[GcswifExportStorage]
+
+
+ Examples
+ --------
+ import asyncio
+
+ from label_studio_sdk import AsyncLabelStudio
+
+ client = AsyncLabelStudio(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.export_storage.gcswif.list()
+
+
+ asyncio.run(main())
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ "api/storages/export/gcswif",
+ method="GET",
+ params={
+ "ordering": ordering,
+ "project": project,
+ },
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ typing.List[GcswifExportStorage],
+ construct_type(
+ type_=typing.List[GcswifExportStorage], # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def create(
+ self,
+ *,
+ project: int,
+ bucket: typing.Optional[str] = OMIT,
+ can_delete_objects: typing.Optional[bool] = OMIT,
+ description: typing.Optional[str] = OMIT,
+ google_application_credentials: typing.Optional[str] = OMIT,
+ google_project_id: typing.Optional[str] = OMIT,
+ google_project_number: typing.Optional[str] = OMIT,
+ google_service_account_email: typing.Optional[str] = OMIT,
+ google_wif_pool_id: typing.Optional[str] = OMIT,
+ google_wif_provider_id: typing.Optional[str] = OMIT,
+ last_sync: typing.Optional[dt.datetime] = OMIT,
+ last_sync_count: typing.Optional[int] = OMIT,
+ last_sync_job: typing.Optional[str] = OMIT,
+ meta: typing.Optional[typing.Optional[typing.Any]] = OMIT,
+ prefix: typing.Optional[str] = OMIT,
+ regex_filter: typing.Optional[str] = OMIT,
+ status: typing.Optional[StatusC5AEnum] = OMIT,
+ synchronizable: typing.Optional[bool] = OMIT,
+ title: typing.Optional[str] = OMIT,
+ traceback: typing.Optional[str] = OMIT,
+ use_blob_urls: typing.Optional[bool] = OMIT,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> GcswifExportStorage:
+ """
+ Create a GCS export storage connection with WIF authentication to store annotations.
+
+ Parameters
+ ----------
+ project : int
+ A unique integer value identifying this project.
+ + bucket : typing.Optional[str] + GCS bucket name + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + description : typing.Optional[str] + Cloud storage description + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file + + google_project_id : typing.Optional[str] + Google project ID + + google_project_number : typing.Optional[str] + Google project number + + google_service_account_email : typing.Optional[str] + Google service account email + + google_wif_pool_id : typing.Optional[str] + Google WIF pool ID + + google_wif_provider_id : typing.Optional[str] + Google WIF provider ID + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + GCS bucket prefix + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + GcswifExportStorage + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.export_storage.gcswif.create( + project=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/gcswif", + method="POST", + json={ + "bucket": bucket, + "can_delete_objects": can_delete_objects, + "description": description, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + "google_project_number": google_project_number, + "google_service_account_email": google_service_account_email, + "google_wif_pool_id": google_wif_pool_id, + "google_wif_provider_id": google_wif_provider_id, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcswifExportStorage, + construct_type( + type_=GcswifExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def validate( + self, + *, + project: int, + bucket: typing.Optional[str] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + description: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + google_project_number: typing.Optional[str] = OMIT, + google_service_account_email: typing.Optional[str] = OMIT, + 
google_wif_pool_id: typing.Optional[str] = OMIT, + google_wif_provider_id: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> None: + """ + Validate a specific GCS export storage connection that was set up with WIF authentication. + + Parameters + ---------- + project : int + A unique integer value identifying this project. + + bucket : typing.Optional[str] + GCS bucket name + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + description : typing.Optional[str] + Cloud storage description + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file + + google_project_id : typing.Optional[str] + Google project ID + + google_project_number : typing.Optional[str] + Google project number + + google_service_account_email : typing.Optional[str] + Google service account email + + google_wif_pool_id : typing.Optional[str] + Google WIF pool ID + + google_wif_provider_id : typing.Optional[str] + Google WIF provider ID + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + GCS bucket prefix + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.export_storage.gcswif.validate( + project=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/export/gcswif/validate", + method="POST", + json={ + "bucket": bucket, + "can_delete_objects": can_delete_objects, + "description": description, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + "google_project_number": google_project_number, + "google_service_account_email": google_service_account_email, + "google_wif_pool_id": google_wif_pool_id, + "google_wif_provider_id": google_wif_provider_id, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcswifExportStorage: + """ + Get a specific GCS export storage connection that was set up with WIF authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + GcswifExportStorage + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.export_storage.gcswif.get( + id=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/gcswif/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcswifExportStorage, + construct_type( + type_=GcswifExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: + """ + Delete a specific GCS export storage connection that was set up with WIF authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.export_storage.gcswif.delete( + id=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/gcswif/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def update( + self, + id: int, + *, + bucket: typing.Optional[str] = OMIT, + can_delete_objects: typing.Optional[bool] = OMIT, + description: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + google_project_number: typing.Optional[str] = OMIT, + google_service_account_email: typing.Optional[str] = OMIT, + google_wif_pool_id: typing.Optional[str] = OMIT, + google_wif_provider_id: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + project: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> GcswifExportStorage: + """ + Update a specific GCS export storage connection that was set up with WIF authentication. + + Parameters + ---------- + id : int + + bucket : typing.Optional[str] + GCS bucket name + + can_delete_objects : typing.Optional[bool] + Deletion from storage enabled + + description : typing.Optional[str] + Cloud storage description + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file + + google_project_id : typing.Optional[str] + Google project ID + + google_project_number : typing.Optional[str] + Google project number + + google_service_account_email : typing.Optional[str] + Google service account email + + google_wif_pool_id : typing.Optional[str] + Google WIF pool ID + + google_wif_provider_id : typing.Optional[str] + Google WIF provider ID + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + GCS bucket prefix + + project : typing.Optional[int] + A unique integer value identifying this project. 
+
+ regex_filter : typing.Optional[str]
+ Cloud storage regex for filtering objects
+
+ status : typing.Optional[StatusC5AEnum]
+
+ synchronizable : typing.Optional[bool]
+
+ title : typing.Optional[str]
+ Cloud storage title
+
+ traceback : typing.Optional[str]
+ Traceback report for the last failed sync
+
+ use_blob_urls : typing.Optional[bool]
+ Interpret objects as BLOBs and generate URLs
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ GcswifExportStorage
+
+
+ Examples
+ --------
+ import asyncio
+
+ from label_studio_sdk import AsyncLabelStudio
+
+ client = AsyncLabelStudio(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.export_storage.gcswif.update(
+ id=1,
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"api/storages/export/gcswif/{jsonable_encoder(id)}",
+ method="PATCH",
+ json={
+ "bucket": bucket,
+ "can_delete_objects": can_delete_objects,
+ "description": description,
+ "google_application_credentials": google_application_credentials,
+ "google_project_id": google_project_id,
+ "google_project_number": google_project_number,
+ "google_service_account_email": google_service_account_email,
+ "google_wif_pool_id": google_wif_pool_id,
+ "google_wif_provider_id": google_wif_provider_id,
+ "last_sync": last_sync,
+ "last_sync_count": last_sync_count,
+ "last_sync_job": last_sync_job,
+ "meta": meta,
+ "prefix": prefix,
+ "project": project,
+ "regex_filter": regex_filter,
+ "status": status,
+ "synchronizable": synchronizable,
+ "title": title,
+ "traceback": traceback,
+ "use_blob_urls": use_blob_urls,
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ GcswifExportStorage,
+ construct_type(
+ type_=GcswifExportStorage, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcswifExportStorage:
+ """
+ Sync tasks from a GCS WIF export storage.
+
+ Parameters
+ ----------
+ id : int
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+ + Returns + ------- + GcswifExportStorage + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.export_storage.gcswif.sync( + id=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/export/gcswif/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcswifExportStorage, + construct_type( + type_=GcswifExportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/import_storage/__init__.py b/src/label_studio_sdk/import_storage/__init__.py index 41155d20f..0d66ae1b9 100644 --- a/src/label_studio_sdk/import_storage/__init__.py +++ b/src/label_studio_sdk/import_storage/__init__.py @@ -1,6 +1,6 @@ # This file was auto-generated by Fern from our API Definition. from .types import ImportStorageListTypesResponseItem -from . import azure, gcs, local, redis, s3, s3s +from . import azure, azure_spi, gcs, gcswif, local, redis, s3, s3s -__all__ = ["ImportStorageListTypesResponseItem", "azure", "gcs", "local", "redis", "s3", "s3s"] +__all__ = ["ImportStorageListTypesResponseItem", "azure", "azure_spi", "gcs", "gcswif", "local", "redis", "s3", "s3s"] diff --git a/src/label_studio_sdk/import_storage/azure_spi/__init__.py b/src/label_studio_sdk/import_storage/azure_spi/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/src/label_studio_sdk/import_storage/azure_spi/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/src/label_studio_sdk/import_storage/azure_spi/client.py b/src/label_studio_sdk/import_storage/azure_spi/client.py new file mode 100644 index 000000000..2dcc6a22d --- /dev/null +++ b/src/label_studio_sdk/import_storage/azure_spi/client.py @@ -0,0 +1,1378 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from ...core.client_wrapper import SyncClientWrapper +from ...core.request_options import RequestOptions +from ...types.azure_service_principal_import_storage import AzureServicePrincipalImportStorage +from ...core.unchecked_base_model import construct_type +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError +import datetime as dt +from ...types.status_c5a_enum import StatusC5AEnum +from ...core.jsonable_encoder import jsonable_encoder +from ...core.client_wrapper import AsyncClientWrapper + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class AzureSpiClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + *, + ordering: typing.Optional[str] = None, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.List[AzureServicePrincipalImportStorage]: + """ + Get list of all Azure import storage connections set up with Service Principal authentication. + + Parameters + ---------- + ordering : typing.Optional[str] + Which field to use when ordering the results. 
+ + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[AzureServicePrincipalImportStorage] + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.azure_spi.list() + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/azure_spi/", + method="GET", + params={ + "ordering": ordering, + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[AzureServicePrincipalImportStorage], + construct_type( + type_=typing.List[AzureServicePrincipalImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def create( + self, + *, + project: int, + account_name: typing.Optional[str] = OMIT, + client_id: typing.Optional[str] = OMIT, + client_secret: typing.Optional[str] = OMIT, + container: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + tenant_id: typing.Optional[str] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + user_delegation_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AzureServicePrincipalImportStorage: + """ + Create Azure import storage with Service Principal authentication. + + Parameters + ---------- + project : int + A unique integer value identifying this project. 
+ + account_name : typing.Optional[str] + Azure Blob account name + + client_id : typing.Optional[str] + Azure Blob Service Principal Client ID + + client_secret : typing.Optional[str] + Azure Blob Service Principal Client Secret + + container : typing.Optional[str] + Azure blob container + + description : typing.Optional[str] + Cloud storage description + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + Azure blob prefix name + + presign : typing.Optional[bool] + + presign_ttl : typing.Optional[int] + Presigned URLs TTL (in minutes) + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + tenant_id : typing.Optional[str] + Azure Tenant ID + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + user_delegation_key : typing.Optional[str] + User Delegation Key (Backend) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AzureServicePrincipalImportStorage + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.azure_spi.create( + project=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/azure_spi/", + method="POST", + json={ + "account_name": account_name, + "client_id": client_id, + "client_secret": client_secret, + "container": container, + "description": description, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "presign": presign, + "presign_ttl": presign_ttl, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "tenant_id": tenant_id, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + "user_delegation_key": user_delegation_key, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalImportStorage, + construct_type( + type_=AzureServicePrincipalImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def validate( + self, + *, + project: int, + account_name: typing.Optional[str] = OMIT, + client_id: typing.Optional[str] = OMIT, + client_secret: typing.Optional[str] = OMIT, + container: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + 
synchronizable: typing.Optional[bool] = OMIT, + tenant_id: typing.Optional[str] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + user_delegation_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> None: + """ + Validate a specific Azure import storage connection that was set up with Service Principal authentication. + + Parameters + ---------- + project : int + A unique integer value identifying this project. + + account_name : typing.Optional[str] + Azure Blob account name + + client_id : typing.Optional[str] + Azure Blob Service Principal Client ID + + client_secret : typing.Optional[str] + Azure Blob Service Principal Client Secret + + container : typing.Optional[str] + Azure blob container + + description : typing.Optional[str] + Cloud storage description + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + Azure blob prefix name + + presign : typing.Optional[bool] + + presign_ttl : typing.Optional[int] + Presigned URLs TTL (in minutes) + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + tenant_id : typing.Optional[str] + Azure Tenant ID + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + user_delegation_key : typing.Optional[str] + User Delegation Key (Backend) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + None + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.azure_spi.validate( + project=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/azure_spi/validate", + method="POST", + json={ + "account_name": account_name, + "client_id": client_id, + "client_secret": client_secret, + "container": container, + "description": description, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "presign": presign, + "presign_ttl": presign_ttl, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "tenant_id": tenant_id, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + "user_delegation_key": user_delegation_key, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AzureServicePrincipalImportStorage: + """ + Get a specific Azure import storage connection that was set up with Service Principal authentication. 
+ + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AzureServicePrincipalImportStorage + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.azure_spi.get( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure_spi/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalImportStorage, + construct_type( + type_=AzureServicePrincipalImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: + """ + Delete a specific Azure import storage connection that was set up with Service Principal authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + None + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.azure_spi.delete( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure_spi/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def update( + self, + id: int, + *, + account_name: typing.Optional[str] = OMIT, + client_id: typing.Optional[str] = OMIT, + client_secret: typing.Optional[str] = OMIT, + container: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + tenant_id: typing.Optional[str] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + user_delegation_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AzureServicePrincipalImportStorage: + """ + Update a specific Azure import storage connection that was set up with Service Principal authentication. 
+ + Parameters + ---------- + id : int + + account_name : typing.Optional[str] + Azure Blob account name + + client_id : typing.Optional[str] + Azure Blob Service Principal Client ID + + client_secret : typing.Optional[str] + Azure Blob Service Principal Client Secret + + container : typing.Optional[str] + Azure blob container + + description : typing.Optional[str] + Cloud storage description + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + Azure blob prefix name + + presign : typing.Optional[bool] + + presign_ttl : typing.Optional[int] + Presigned URLs TTL (in minutes) + + project : typing.Optional[int] + A unique integer value identifying this project. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + tenant_id : typing.Optional[str] + Azure Tenant ID + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + user_delegation_key : typing.Optional[str] + User Delegation Key (Backend) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AzureServicePrincipalImportStorage + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.azure_spi.update( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure_spi/{jsonable_encoder(id)}", + method="PATCH", + json={ + "account_name": account_name, + "client_id": client_id, + "client_secret": client_secret, + "container": container, + "description": description, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "presign": presign, + "presign_ttl": presign_ttl, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "tenant_id": tenant_id, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + "user_delegation_key": user_delegation_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalImportStorage, + construct_type( + type_=AzureServicePrincipalImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AzureServicePrincipalImportStorage: + """ + Sync tasks from an Azure import storage connection that was set up with Service Principal authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AzureServicePrincipalImportStorage + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.azure_spi.sync( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/azure_spi/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalImportStorage, + construct_type( + type_=AzureServicePrincipalImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncAzureSpiClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, + *, + ordering: typing.Optional[str] = None, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.List[AzureServicePrincipalImportStorage]: + """ + Get list of all Azure import storage connections set up with Service Principal authentication. + + Parameters + ---------- + ordering : typing.Optional[str] + Which field to use when ordering the results. + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[AzureServicePrincipalImportStorage] + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.import_storage.azure_spi.list() + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/azure_spi/", + method="GET", + params={ + "ordering": ordering, + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[AzureServicePrincipalImportStorage], + construct_type( + type_=typing.List[AzureServicePrincipalImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def create( + self, + *, + project: int, + account_name: typing.Optional[str] = OMIT, + client_id: typing.Optional[str] = OMIT, + client_secret: typing.Optional[str] = OMIT, + container: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + tenant_id: typing.Optional[str] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + user_delegation_key: typing.Optional[str] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> AzureServicePrincipalImportStorage: + """ + Create Azure import storage with Service Principal authentication. + + Parameters + ---------- + project : int + A unique integer value identifying this project. + + account_name : typing.Optional[str] + Azure Blob account name + + client_id : typing.Optional[str] + Azure Blob Service Principal Client ID + + client_secret : typing.Optional[str] + Azure Blob Service Principal Client Secret + + container : typing.Optional[str] + Azure blob container + + description : typing.Optional[str] + Cloud storage description + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + Azure blob prefix name + + presign : typing.Optional[bool] + + presign_ttl : typing.Optional[int] + Presigned URLs TTL (in minutes) + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + tenant_id : typing.Optional[str] + Azure Tenant ID + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + user_delegation_key : typing.Optional[str] + User Delegation Key (Backend) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AzureServicePrincipalImportStorage + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.import_storage.azure_spi.create( + project=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/azure_spi/", + method="POST", + json={ + "account_name": account_name, + "client_id": client_id, + "client_secret": client_secret, + "container": container, + "description": description, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "presign": presign, + "presign_ttl": presign_ttl, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "tenant_id": tenant_id, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + "user_delegation_key": user_delegation_key, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalImportStorage, + construct_type( + type_=AzureServicePrincipalImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def validate( + self, + *, + project: int, + account_name: typing.Optional[str] = OMIT, + client_id: typing.Optional[str] = OMIT, + client_secret: typing.Optional[str] = OMIT, + container: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: 
typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + tenant_id: typing.Optional[str] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + user_delegation_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> None: + """ + Validate a specific Azure import storage connection that was set up with Service Principal authentication. + + Parameters + ---------- + project : int + A unique integer value identifying this project. + + account_name : typing.Optional[str] + Azure Blob account name + + client_id : typing.Optional[str] + Azure Blob Service Principal Client ID + + client_secret : typing.Optional[str] + Azure Blob Service Principal Client Secret + + container : typing.Optional[str] + Azure blob container + + description : typing.Optional[str] + Cloud storage description + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + Azure blob prefix name + + presign : typing.Optional[bool] + + presign_ttl : typing.Optional[int] + Presigned URLs TTL (in minutes) + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + tenant_id : typing.Optional[str] + Azure Tenant ID + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + user_delegation_key : typing.Optional[str] + User Delegation Key (Backend) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.import_storage.azure_spi.validate( + project=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/azure_spi/validate", + method="POST", + json={ + "account_name": account_name, + "client_id": client_id, + "client_secret": client_secret, + "container": container, + "description": description, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "presign": presign, + "presign_ttl": presign_ttl, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "tenant_id": tenant_id, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + "user_delegation_key": user_delegation_key, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def get( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AzureServicePrincipalImportStorage: + """ + Get a specific Azure import storage connection that was set up with Service Principal authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AzureServicePrincipalImportStorage + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.import_storage.azure_spi.get( + id=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure_spi/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalImportStorage, + construct_type( + type_=AzureServicePrincipalImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: + """ + Delete a specific Azure import storage connection that was set up with Service Principal authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.import_storage.azure_spi.delete( + id=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure_spi/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def update( + self, + id: int, + *, + account_name: typing.Optional[str] = OMIT, + client_id: typing.Optional[str] = OMIT, + client_secret: typing.Optional[str] = OMIT, + container: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + tenant_id: typing.Optional[str] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + user_delegation_key: typing.Optional[str] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> AzureServicePrincipalImportStorage: + """ + Update a specific Azure import storage connection that was set up with Service Principal authentication. + + Parameters + ---------- + id : int + + account_name : typing.Optional[str] + Azure Blob account name + + client_id : typing.Optional[str] + Azure Blob Service Principal Client ID + + client_secret : typing.Optional[str] + Azure Blob Service Principal Client Secret + + container : typing.Optional[str] + Azure blob container + + description : typing.Optional[str] + Cloud storage description + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + Azure blob prefix name + + presign : typing.Optional[bool] + + presign_ttl : typing.Optional[int] + Presigned URLs TTL (in minutes) + + project : typing.Optional[int] + A unique integer value identifying this project. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + tenant_id : typing.Optional[str] + Azure Tenant ID + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + user_delegation_key : typing.Optional[str] + User Delegation Key (Backend) + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AzureServicePrincipalImportStorage + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.import_storage.azure_spi.update( + id=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure_spi/{jsonable_encoder(id)}", + method="PATCH", + json={ + "account_name": account_name, + "client_id": client_id, + "client_secret": client_secret, + "container": container, + "description": description, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "presign": presign, + "presign_ttl": presign_ttl, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "tenant_id": tenant_id, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + "user_delegation_key": user_delegation_key, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalImportStorage, + construct_type( + type_=AzureServicePrincipalImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def sync( + self, id: int, *, request_options: typing.Optional[RequestOptions] = None + ) -> AzureServicePrincipalImportStorage: + """ + Sync tasks from an Azure import storage connection that was set up with Service Principal authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + AzureServicePrincipalImportStorage + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.import_storage.azure_spi.sync( + id=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/azure_spi/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + AzureServicePrincipalImportStorage, + construct_type( + type_=AzureServicePrincipalImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/import_storage/client.py b/src/label_studio_sdk/import_storage/client.py index 4372a48a9..887f77cde 100644 --- a/src/label_studio_sdk/import_storage/client.py +++ b/src/label_studio_sdk/import_storage/client.py @@ -2,7 +2,9 @@ from ..core.client_wrapper import SyncClientWrapper from .azure.client import AzureClient +from .azure_spi.client import AzureSpiClient from .gcs.client import GcsClient +from .gcswif.client import GcswifClient from .local.client import LocalClient from .redis.client import RedisClient from .s3.client import S3Client @@ -15,7 +17,9 @@ from ..core.api_error import ApiError from ..core.client_wrapper import AsyncClientWrapper from .azure.client import AsyncAzureClient +from .azure_spi.client import AsyncAzureSpiClient from .gcs.client import AsyncGcsClient +from .gcswif.client import AsyncGcswifClient from .local.client import AsyncLocalClient from .redis.client import AsyncRedisClient from .s3.client import AsyncS3Client @@ -26,7 +30,9 @@ class ImportStorageClient: def __init__(self, *, client_wrapper: SyncClientWrapper): self._client_wrapper = client_wrapper self.azure = AzureClient(client_wrapper=self._client_wrapper) + self.azure_spi = AzureSpiClient(client_wrapper=self._client_wrapper) self.gcs = GcsClient(client_wrapper=self._client_wrapper) + self.gcswif = GcswifClient(client_wrapper=self._client_wrapper) self.local = LocalClient(client_wrapper=self._client_wrapper) self.redis = RedisClient(client_wrapper=self._client_wrapper) self.s3 = S3Client(client_wrapper=self._client_wrapper) @@ -81,7 +87,9 @@ class AsyncImportStorageClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): self._client_wrapper = client_wrapper self.azure = AsyncAzureClient(client_wrapper=self._client_wrapper) + self.azure_spi = AsyncAzureSpiClient(client_wrapper=self._client_wrapper) self.gcs = AsyncGcsClient(client_wrapper=self._client_wrapper) + self.gcswif = AsyncGcswifClient(client_wrapper=self._client_wrapper) self.local = AsyncLocalClient(client_wrapper=self._client_wrapper) self.redis = AsyncRedisClient(client_wrapper=self._client_wrapper) self.s3 = AsyncS3Client(client_wrapper=self._client_wrapper) diff --git a/src/label_studio_sdk/import_storage/gcswif/__init__.py b/src/label_studio_sdk/import_storage/gcswif/__init__.py new file mode 100644 index 000000000..f3ea2659b --- /dev/null +++ b/src/label_studio_sdk/import_storage/gcswif/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. 
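A minimal usage sketch for the new import_storage.azure_spi sub-client registered above, chaining the create, validate and sync endpoints defined earlier in this diff; the project ID, container name and Service Principal credentials are placeholder values, and the sketch assumes the returned storage object exposes its connection id (the gcswif sub-client added below follows the same pattern with GCS/WIF fields):

from label_studio_sdk import LabelStudio

client = LabelStudio(
    api_key="YOUR_API_KEY",
)

# Hypothetical values; a real connection needs an existing project ID and
# valid Service Principal credentials for the target container.
storage = client.import_storage.azure_spi.create(
    project=1,
    container="my-container",
    account_name="my-account",
    client_id="CLIENT_ID",
    client_secret="CLIENT_SECRET",
    tenant_id="TENANT_ID",
)

# Check the credentials, then import tasks from the container.
client.import_storage.azure_spi.validate(
    project=1,
    container="my-container",
)
client.import_storage.azure_spi.sync(id=storage.id)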
+ diff --git a/src/label_studio_sdk/import_storage/gcswif/client.py b/src/label_studio_sdk/import_storage/gcswif/client.py new file mode 100644 index 000000000..9fd522546 --- /dev/null +++ b/src/label_studio_sdk/import_storage/gcswif/client.py @@ -0,0 +1,1400 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +from ...core.client_wrapper import SyncClientWrapper +from ...core.request_options import RequestOptions +from ...types.gcswif_import_storage import GcswifImportStorage +from ...core.unchecked_base_model import construct_type +from json.decoder import JSONDecodeError +from ...core.api_error import ApiError +import datetime as dt +from ...types.status_c5a_enum import StatusC5AEnum +from ...core.jsonable_encoder import jsonable_encoder +from ...core.client_wrapper import AsyncClientWrapper + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class GcswifClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list( + self, + *, + ordering: typing.Optional[str] = None, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.List[GcswifImportStorage]: + """ + Get list of all GCS import storage connections set up with WIF authentication. + + Parameters + ---------- + ordering : typing.Optional[str] + Which field to use when ordering the results. + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + typing.List[GcswifImportStorage] + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.gcswif.list() + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/gcswif/", + method="GET", + params={ + "ordering": ordering, + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[GcswifImportStorage], + construct_type( + type_=typing.List[GcswifImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def create( + self, + *, + project: int, + bucket: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + google_project_number: typing.Optional[str] = OMIT, + google_service_account_email: typing.Optional[str] = OMIT, + google_wif_pool_id: typing.Optional[str] = OMIT, + google_wif_provider_id: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: 
typing.Optional[RequestOptions] = None, + ) -> GcswifImportStorage: + """ + Create GCS import storage with WIF. + + Parameters + ---------- + project : int + A unique integer value identifying this project. + + bucket : typing.Optional[str] + GCS bucket name + + description : typing.Optional[str] + Cloud storage description + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file + + google_project_id : typing.Optional[str] + Google project ID + + google_project_number : typing.Optional[str] + Google project number + + google_service_account_email : typing.Optional[str] + Google service account email + + google_wif_pool_id : typing.Optional[str] + Google WIF pool ID + + google_wif_provider_id : typing.Optional[str] + Google WIF provider ID + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + GCS bucket prefix + + presign : typing.Optional[bool] + + presign_ttl : typing.Optional[int] + Presigned URLs TTL (in minutes) + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + GcswifImportStorage + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.gcswif.create( + project=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/gcswif/", + method="POST", + json={ + "bucket": bucket, + "description": description, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + "google_project_number": google_project_number, + "google_service_account_email": google_service_account_email, + "google_wif_pool_id": google_wif_pool_id, + "google_wif_provider_id": google_wif_provider_id, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "presign": presign, + "presign_ttl": presign_ttl, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcswifImportStorage, + construct_type( + type_=GcswifImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def validate( + self, + *, + project: int, + bucket: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + google_project_number: typing.Optional[str] = 
OMIT, + google_service_account_email: typing.Optional[str] = OMIT, + google_wif_pool_id: typing.Optional[str] = OMIT, + google_wif_provider_id: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> None: + """ + Validate a specific GCS import storage connection that was set up with WIF authentication. + + Parameters + ---------- + project : int + A unique integer value identifying this project. + + bucket : typing.Optional[str] + GCS bucket name + + description : typing.Optional[str] + Cloud storage description + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file + + google_project_id : typing.Optional[str] + Google project ID + + google_project_number : typing.Optional[str] + Google project number + + google_service_account_email : typing.Optional[str] + Google service account email + + google_wif_pool_id : typing.Optional[str] + Google WIF pool ID + + google_wif_provider_id : typing.Optional[str] + Google WIF provider ID + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + GCS bucket prefix + + presign : typing.Optional[bool] + + presign_ttl : typing.Optional[int] + Presigned URLs TTL (in minutes) + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.gcswif.validate( + project=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + "api/storages/gcswif/validate", + method="POST", + json={ + "bucket": bucket, + "description": description, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + "google_project_number": google_project_number, + "google_service_account_email": google_service_account_email, + "google_wif_pool_id": google_wif_pool_id, + "google_wif_provider_id": google_wif_provider_id, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "presign": presign, + "presign_ttl": presign_ttl, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcswifImportStorage: + """ + Get a specific GCS import storage connection that was set up with WIF. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + GcswifImportStorage + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.gcswif.get( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/gcswif/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcswifImportStorage, + construct_type( + type_=GcswifImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: + """ + Delete a specific GCS import storage connection that was set up with WIF authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.gcswif.delete( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/gcswif/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def update( + self, + id: int, + *, + bucket: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + google_project_number: typing.Optional[str] = OMIT, + google_service_account_email: typing.Optional[str] = OMIT, + google_wif_pool_id: typing.Optional[str] = OMIT, + google_wif_provider_id: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> GcswifImportStorage: + """ + Update a specific GCS import storage connection that was set up with WIF authentication. + + Parameters + ---------- + id : int + + bucket : typing.Optional[str] + GCS bucket name + + description : typing.Optional[str] + Cloud storage description + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file + + google_project_id : typing.Optional[str] + Google project ID + + google_project_number : typing.Optional[str] + Google project number + + google_service_account_email : typing.Optional[str] + Google service account email + + google_wif_pool_id : typing.Optional[str] + Google WIF pool ID + + google_wif_provider_id : typing.Optional[str] + Google WIF provider ID + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + GCS bucket prefix + + presign : typing.Optional[bool] + + presign_ttl : typing.Optional[int] + Presigned URLs TTL (in minutes) + + project : typing.Optional[int] + A unique integer value identifying this project. + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + GcswifImportStorage + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.gcswif.update( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/gcswif/{jsonable_encoder(id)}", + method="PATCH", + json={ + "bucket": bucket, + "description": description, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + "google_project_number": google_project_number, + "google_service_account_email": google_service_account_email, + "google_wif_pool_id": google_wif_pool_id, + "google_wif_provider_id": google_wif_provider_id, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "presign": presign, + "presign_ttl": presign_ttl, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcswifImportStorage, + construct_type( + type_=GcswifImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcswifImportStorage: + """ + Sync tasks from an GCS import storage connection that was set up with WIF authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + GcswifImportStorage + + + Examples + -------- + from label_studio_sdk import LabelStudio + + client = LabelStudio( + api_key="YOUR_API_KEY", + ) + client.import_storage.gcswif.sync( + id=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"api/storages/gcswif/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcswifImportStorage, + construct_type( + type_=GcswifImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncGcswifClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list( + self, + *, + ordering: typing.Optional[str] = None, + project: typing.Optional[int] = None, + request_options: typing.Optional[RequestOptions] = None, + ) -> typing.List[GcswifImportStorage]: + """ + Get list of all GCS import storage connections set up with WIF authentication. + + Parameters + ---------- + ordering : typing.Optional[str] + Which field to use when ordering the results. + + project : typing.Optional[int] + Project ID + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + typing.List[GcswifImportStorage] + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.import_storage.gcswif.list() + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/gcswif/", + method="GET", + params={ + "ordering": ordering, + "project": project, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + typing.List[GcswifImportStorage], + construct_type( + type_=typing.List[GcswifImportStorage], # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def create( + self, + *, + project: int, + bucket: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + google_project_number: typing.Optional[str] = OMIT, + google_service_account_email: typing.Optional[str] = OMIT, + google_wif_pool_id: typing.Optional[str] = OMIT, + google_wif_provider_id: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> GcswifImportStorage: + """ + Create GCS import storage with WIF. + + Parameters + ---------- + project : int + A unique integer value identifying this project. 
+ + bucket : typing.Optional[str] + GCS bucket name + + description : typing.Optional[str] + Cloud storage description + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file + + google_project_id : typing.Optional[str] + Google project ID + + google_project_number : typing.Optional[str] + Google project number + + google_service_account_email : typing.Optional[str] + Google service account email + + google_wif_pool_id : typing.Optional[str] + Google WIF pool ID + + google_wif_provider_id : typing.Optional[str] + Google WIF provider ID + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + GCS bucket prefix + + presign : typing.Optional[bool] + + presign_ttl : typing.Optional[int] + Presigned URLs TTL (in minutes) + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + GcswifImportStorage + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.import_storage.gcswif.create( + project=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/gcswif/", + method="POST", + json={ + "bucket": bucket, + "description": description, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + "google_project_number": google_project_number, + "google_service_account_email": google_service_account_email, + "google_wif_pool_id": google_wif_pool_id, + "google_wif_provider_id": google_wif_provider_id, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "presign": presign, + "presign_ttl": presign_ttl, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcswifImportStorage, + construct_type( + type_=GcswifImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def validate( + self, + *, + project: int, + bucket: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + google_project_number: typing.Optional[str] = OMIT, + google_service_account_email: typing.Optional[str] = OMIT, + google_wif_pool_id: 
typing.Optional[str] = OMIT, + google_wif_provider_id: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> None: + """ + Validate a specific GCS import storage connection that was set up with WIF authentication. + + Parameters + ---------- + project : int + A unique integer value identifying this project. + + bucket : typing.Optional[str] + GCS bucket name + + description : typing.Optional[str] + Cloud storage description + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file + + google_project_id : typing.Optional[str] + Google project ID + + google_project_number : typing.Optional[str] + Google project number + + google_service_account_email : typing.Optional[str] + Google service account email + + google_wif_pool_id : typing.Optional[str] + Google WIF pool ID + + google_wif_provider_id : typing.Optional[str] + Google WIF provider ID + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + GCS bucket prefix + + presign : typing.Optional[bool] + + presign_ttl : typing.Optional[int] + Presigned URLs TTL (in minutes) + + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.import_storage.gcswif.validate( + project=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + "api/storages/gcswif/validate", + method="POST", + json={ + "bucket": bucket, + "description": description, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + "google_project_number": google_project_number, + "google_service_account_email": google_service_account_email, + "google_wif_pool_id": google_wif_pool_id, + "google_wif_provider_id": google_wif_provider_id, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "presign": presign, + "presign_ttl": presign_ttl, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def get(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcswifImportStorage: + """ + Get a specific GCS import storage connection that was set up with WIF. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + GcswifImportStorage + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.import_storage.gcswif.get( + id=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcswif/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcswifImportStorage, + construct_type( + type_=GcswifImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def delete(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None: + """ + Delete a specific GCS import storage connection that was set up with WIF authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + None + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.import_storage.gcswif.delete( + id=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcswif/{jsonable_encoder(id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def update( + self, + id: int, + *, + bucket: typing.Optional[str] = OMIT, + description: typing.Optional[str] = OMIT, + google_application_credentials: typing.Optional[str] = OMIT, + google_project_id: typing.Optional[str] = OMIT, + google_project_number: typing.Optional[str] = OMIT, + google_service_account_email: typing.Optional[str] = OMIT, + google_wif_pool_id: typing.Optional[str] = OMIT, + google_wif_provider_id: typing.Optional[str] = OMIT, + last_sync: typing.Optional[dt.datetime] = OMIT, + last_sync_count: typing.Optional[int] = OMIT, + last_sync_job: typing.Optional[str] = OMIT, + meta: typing.Optional[typing.Optional[typing.Any]] = OMIT, + prefix: typing.Optional[str] = OMIT, + presign: typing.Optional[bool] = OMIT, + presign_ttl: typing.Optional[int] = OMIT, + project: typing.Optional[int] = OMIT, + regex_filter: typing.Optional[str] = OMIT, + status: typing.Optional[StatusC5AEnum] = OMIT, + synchronizable: typing.Optional[bool] = OMIT, + title: typing.Optional[str] = OMIT, + traceback: typing.Optional[str] = OMIT, + use_blob_urls: typing.Optional[bool] = OMIT, + request_options: typing.Optional[RequestOptions] = None, + ) -> GcswifImportStorage: + """ + Update a specific GCS import storage connection that was set up with WIF authentication. + + Parameters + ---------- + id : int + + bucket : typing.Optional[str] + GCS bucket name + + description : typing.Optional[str] + Cloud storage description + + google_application_credentials : typing.Optional[str] + The content of GOOGLE_APPLICATION_CREDENTIALS json file + + google_project_id : typing.Optional[str] + Google project ID + + google_project_number : typing.Optional[str] + Google project number + + google_service_account_email : typing.Optional[str] + Google service account email + + google_wif_pool_id : typing.Optional[str] + Google WIF pool ID + + google_wif_provider_id : typing.Optional[str] + Google WIF provider ID + + last_sync : typing.Optional[dt.datetime] + Last sync finished time + + last_sync_count : typing.Optional[int] + Count of tasks synced last time + + last_sync_job : typing.Optional[str] + Last sync job ID + + meta : typing.Optional[typing.Optional[typing.Any]] + + prefix : typing.Optional[str] + GCS bucket prefix + + presign : typing.Optional[bool] + + presign_ttl : typing.Optional[int] + Presigned URLs TTL (in minutes) + + project : typing.Optional[int] + A unique integer value identifying this project. 
+ + regex_filter : typing.Optional[str] + Cloud storage regex for filtering objects + + status : typing.Optional[StatusC5AEnum] + + synchronizable : typing.Optional[bool] + + title : typing.Optional[str] + Cloud storage title + + traceback : typing.Optional[str] + Traceback report for the last failed sync + + use_blob_urls : typing.Optional[bool] + Interpret objects as BLOBs and generate URLs + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + GcswifImportStorage + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.import_storage.gcswif.update( + id=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcswif/{jsonable_encoder(id)}", + method="PATCH", + json={ + "bucket": bucket, + "description": description, + "google_application_credentials": google_application_credentials, + "google_project_id": google_project_id, + "google_project_number": google_project_number, + "google_service_account_email": google_service_account_email, + "google_wif_pool_id": google_wif_pool_id, + "google_wif_provider_id": google_wif_provider_id, + "last_sync": last_sync, + "last_sync_count": last_sync_count, + "last_sync_job": last_sync_job, + "meta": meta, + "prefix": prefix, + "presign": presign, + "presign_ttl": presign_ttl, + "project": project, + "regex_filter": regex_filter, + "status": status, + "synchronizable": synchronizable, + "title": title, + "traceback": traceback, + "use_blob_urls": use_blob_urls, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcswifImportStorage, + construct_type( + type_=GcswifImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def sync(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> GcswifImportStorage: + """ + Sync tasks from an GCS import storage connection that was set up with WIF authentication. + + Parameters + ---------- + id : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + GcswifImportStorage + + + Examples + -------- + import asyncio + + from label_studio_sdk import AsyncLabelStudio + + client = AsyncLabelStudio( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.import_storage.gcswif.sync( + id=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"api/storages/gcswif/{jsonable_encoder(id)}/sync", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + GcswifImportStorage, + construct_type( + type_=GcswifImportStorage, # type: ignore + object_=_response.json(), + ), + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/label_studio_sdk/types/__init__.py b/src/label_studio_sdk/types/__init__.py index 254a63872..6448e6cb7 100644 --- a/src/label_studio_sdk/types/__init__.py +++ b/src/label_studio_sdk/types/__init__.py @@ -26,6 +26,10 @@ from .auth_method_enum import AuthMethodEnum from .azure_blob_export_storage import AzureBlobExportStorage from .azure_blob_import_storage import AzureBlobImportStorage +from .azure_service_principal_export_storage import AzureServicePrincipalExportStorage +from .azure_service_principal_export_storage_request import AzureServicePrincipalExportStorageRequest +from .azure_service_principal_import_storage import AzureServicePrincipalImportStorage +from .azure_service_principal_import_storage_request import AzureServicePrincipalImportStorageRequest from .batch_failed_predictions import BatchFailedPredictions from .batch_predictions import BatchPredictions from .billing_checks import BillingChecks @@ -52,6 +56,10 @@ from .finished_enum import FinishedEnum from .gcs_export_storage import GcsExportStorage from .gcs_import_storage import GcsImportStorage +from .gcswif_export_storage import GcswifExportStorage +from .gcswif_export_storage_request import GcswifExportStorageRequest +from .gcswif_import_storage import GcswifImportStorage +from .gcswif_import_storage_request import GcswifImportStorageRequest from .hotkeys import Hotkeys from .import_api_request import ImportApiRequest from .inference_run_cost_estimate import InferenceRunCostEstimate @@ -230,6 +238,10 @@ "AuthMethodEnum", "AzureBlobExportStorage", "AzureBlobImportStorage", + "AzureServicePrincipalExportStorage", + "AzureServicePrincipalExportStorageRequest", + "AzureServicePrincipalImportStorage", + "AzureServicePrincipalImportStorageRequest", "BatchFailedPredictions", "BatchPredictions", "BillingChecks", @@ -256,6 +268,10 @@ "FinishedEnum", "GcsExportStorage", "GcsImportStorage", + "GcswifExportStorage", + "GcswifExportStorageRequest", + "GcswifImportStorage", + "GcswifImportStorageRequest", "Hotkeys", "ImportApiRequest", "InferenceRunCostEstimate", diff --git a/src/label_studio_sdk/types/azure_service_principal_export_storage.py b/src/label_studio_sdk/types/azure_service_principal_export_storage.py new file mode 100644 index 000000000..7b6582fc2 --- /dev/null +++ b/src/label_studio_sdk/types/azure_service_principal_export_storage.py @@ -0,0 +1,114 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from ..core.unchecked_base_model import UncheckedBaseModel +import typing +import pydantic +import datetime as dt +from .status_c5a_enum import StatusC5AEnum +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class AzureServicePrincipalExportStorage(UncheckedBaseModel): + account_name: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Blob account name + """ + + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) + """ + Deletion from storage enabled + """ + + client_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Blob Service Principal Client ID + """ + + client_secret: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Blob Service Principal Client Secret + """ + + container: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure blob container + """ + + created_at: dt.datetime = pydantic.Field() + """ + Creation time + """ + + description: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage description + """ + + id: int + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) + """ + Last sync finished time + """ + + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) + """ + Count of tasks synced last time + """ + + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) + """ + Last sync job ID + """ + + meta: typing.Optional[typing.Optional[typing.Any]] = None + prefix: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure blob prefix name + """ + + project: int = pydantic.Field() + """ + A unique integer value identifying this project. + """ + + regex_filter: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage regex for filtering objects + """ + + status: typing.Optional[StatusC5AEnum] = None + synchronizable: typing.Optional[bool] = None + tenant_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Tenant ID + """ + + title: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage title + """ + + traceback: typing.Optional[str] = pydantic.Field(default=None) + """ + Traceback report for the last failed sync + """ + + type: str + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) + """ + Interpret objects as BLOBs and generate URLs + """ + + user_delegation_key: typing.Optional[str] = pydantic.Field(default=None) + """ + User Delegation Key (Backend) + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/azure_service_principal_export_storage_request.py b/src/label_studio_sdk/types/azure_service_principal_export_storage_request.py new file mode 100644 index 000000000..e2fb6ee9c --- /dev/null +++ b/src/label_studio_sdk/types/azure_service_principal_export_storage_request.py @@ -0,0 +1,107 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from ..core.unchecked_base_model import UncheckedBaseModel +import typing +import pydantic +import datetime as dt +from .status_c5a_enum import StatusC5AEnum +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class AzureServicePrincipalExportStorageRequest(UncheckedBaseModel): + account_name: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Blob account name + """ + + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) + """ + Deletion from storage enabled + """ + + client_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Blob Service Principal Client ID + """ + + client_secret: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Blob Service Principal Client Secret + """ + + container: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure blob container + """ + + description: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage description + """ + + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) + """ + Last sync finished time + """ + + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) + """ + Count of tasks synced last time + """ + + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) + """ + Last sync job ID + """ + + meta: typing.Optional[typing.Optional[typing.Any]] = None + prefix: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure blob prefix name + """ + + project: int = pydantic.Field() + """ + A unique integer value identifying this project. + """ + + regex_filter: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage regex for filtering objects + """ + + status: typing.Optional[StatusC5AEnum] = None + synchronizable: typing.Optional[bool] = None + tenant_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Tenant ID + """ + + title: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage title + """ + + traceback: typing.Optional[str] = pydantic.Field(default=None) + """ + Traceback report for the last failed sync + """ + + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) + """ + Interpret objects as BLOBs and generate URLs + """ + + user_delegation_key: typing.Optional[str] = pydantic.Field(default=None) + """ + User Delegation Key (Backend) + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/azure_service_principal_import_storage.py b/src/label_studio_sdk/types/azure_service_principal_import_storage.py new file mode 100644 index 000000000..72b915bbe --- /dev/null +++ b/src/label_studio_sdk/types/azure_service_principal_import_storage.py @@ -0,0 +1,115 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from ..core.unchecked_base_model import UncheckedBaseModel +import typing +import pydantic +import datetime as dt +from .status_c5a_enum import StatusC5AEnum +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class AzureServicePrincipalImportStorage(UncheckedBaseModel): + account_name: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Blob account name + """ + + client_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Blob Service Principal Client ID + """ + + client_secret: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Blob Service Principal Client Secret + """ + + container: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure blob container + """ + + created_at: dt.datetime = pydantic.Field() + """ + Creation time + """ + + description: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage description + """ + + id: int + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) + """ + Last sync finished time + """ + + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) + """ + Count of tasks synced last time + """ + + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) + """ + Last sync job ID + """ + + meta: typing.Optional[typing.Optional[typing.Any]] = None + prefix: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure blob prefix name + """ + + presign: typing.Optional[bool] = None + presign_ttl: typing.Optional[int] = pydantic.Field(default=None) + """ + Presigned URLs TTL (in minutes) + """ + + project: int = pydantic.Field() + """ + A unique integer value identifying this project. + """ + + regex_filter: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage regex for filtering objects + """ + + status: typing.Optional[StatusC5AEnum] = None + synchronizable: typing.Optional[bool] = None + tenant_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Tenant ID + """ + + title: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage title + """ + + traceback: typing.Optional[str] = pydantic.Field(default=None) + """ + Traceback report for the last failed sync + """ + + type: str + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) + """ + Interpret objects as BLOBs and generate URLs + """ + + user_delegation_key: typing.Optional[str] = pydantic.Field(default=None) + """ + User Delegation Key (Backend) + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/azure_service_principal_import_storage_request.py b/src/label_studio_sdk/types/azure_service_principal_import_storage_request.py new file mode 100644 index 000000000..ddf5ea445 --- /dev/null +++ b/src/label_studio_sdk/types/azure_service_principal_import_storage_request.py @@ -0,0 +1,108 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from ..core.unchecked_base_model import UncheckedBaseModel +import typing +import pydantic +import datetime as dt +from .status_c5a_enum import StatusC5AEnum +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class AzureServicePrincipalImportStorageRequest(UncheckedBaseModel): + account_name: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Blob account name + """ + + client_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Blob Service Principal Client ID + """ + + client_secret: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Blob Service Principal Client Secret + """ + + container: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure blob container + """ + + description: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage description + """ + + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) + """ + Last sync finished time + """ + + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) + """ + Count of tasks synced last time + """ + + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) + """ + Last sync job ID + """ + + meta: typing.Optional[typing.Optional[typing.Any]] = None + prefix: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure blob prefix name + """ + + presign: typing.Optional[bool] = None + presign_ttl: typing.Optional[int] = pydantic.Field(default=None) + """ + Presigned URLs TTL (in minutes) + """ + + project: int = pydantic.Field() + """ + A unique integer value identifying this project. + """ + + regex_filter: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage regex for filtering objects + """ + + status: typing.Optional[StatusC5AEnum] = None + synchronizable: typing.Optional[bool] = None + tenant_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Azure Tenant ID + """ + + title: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage title + """ + + traceback: typing.Optional[str] = pydantic.Field(default=None) + """ + Traceback report for the last failed sync + """ + + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) + """ + Interpret objects as BLOBs and generate URLs + """ + + user_delegation_key: typing.Optional[str] = pydantic.Field(default=None) + """ + User Delegation Key (Backend) + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/gcswif_export_storage.py b/src/label_studio_sdk/types/gcswif_export_storage.py new file mode 100644 index 000000000..b7258d054 --- /dev/null +++ b/src/label_studio_sdk/types/gcswif_export_storage.py @@ -0,0 +1,119 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from ..core.unchecked_base_model import UncheckedBaseModel +import typing +import pydantic +import datetime as dt +from .status_c5a_enum import StatusC5AEnum +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class GcswifExportStorage(UncheckedBaseModel): + bucket: typing.Optional[str] = pydantic.Field(default=None) + """ + GCS bucket name + """ + + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) + """ + Deletion from storage enabled + """ + + created_at: dt.datetime = pydantic.Field() + """ + Creation time + """ + + description: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage description + """ + + google_application_credentials: typing.Optional[str] = pydantic.Field(default=None) + """ + The content of GOOGLE_APPLICATION_CREDENTIALS json file + """ + + google_project_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Google project ID + """ + + google_project_number: typing.Optional[str] = pydantic.Field(default=None) + """ + Google project number + """ + + google_service_account_email: typing.Optional[str] = pydantic.Field(default=None) + """ + Google service account email + """ + + google_wif_pool_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Google WIF pool ID + """ + + google_wif_provider_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Google WIF provider ID + """ + + id: int + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) + """ + Last sync finished time + """ + + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) + """ + Count of tasks synced last time + """ + + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) + """ + Last sync job ID + """ + + meta: typing.Optional[typing.Optional[typing.Any]] = None + prefix: typing.Optional[str] = pydantic.Field(default=None) + """ + GCS bucket prefix + """ + + project: int = pydantic.Field() + """ + A unique integer value identifying this project. + """ + + regex_filter: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage regex for filtering objects + """ + + status: typing.Optional[StatusC5AEnum] = None + synchronizable: typing.Optional[bool] = None + title: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage title + """ + + traceback: typing.Optional[str] = pydantic.Field(default=None) + """ + Traceback report for the last failed sync + """ + + type: str + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) + """ + Interpret objects as BLOBs and generate URLs + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/gcswif_export_storage_request.py b/src/label_studio_sdk/types/gcswif_export_storage_request.py new file mode 100644 index 000000000..fb05b9ee5 --- /dev/null +++ b/src/label_studio_sdk/types/gcswif_export_storage_request.py @@ -0,0 +1,112 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from ..core.unchecked_base_model import UncheckedBaseModel +import typing +import pydantic +import datetime as dt +from .status_c5a_enum import StatusC5AEnum +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class GcswifExportStorageRequest(UncheckedBaseModel): + bucket: typing.Optional[str] = pydantic.Field(default=None) + """ + GCS bucket name + """ + + can_delete_objects: typing.Optional[bool] = pydantic.Field(default=None) + """ + Deletion from storage enabled + """ + + description: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage description + """ + + google_application_credentials: typing.Optional[str] = pydantic.Field(default=None) + """ + The content of GOOGLE_APPLICATION_CREDENTIALS json file + """ + + google_project_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Google project ID + """ + + google_project_number: typing.Optional[str] = pydantic.Field(default=None) + """ + Google project number + """ + + google_service_account_email: typing.Optional[str] = pydantic.Field(default=None) + """ + Google service account email + """ + + google_wif_pool_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Google WIF pool ID + """ + + google_wif_provider_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Google WIF provider ID + """ + + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) + """ + Last sync finished time + """ + + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) + """ + Count of tasks synced last time + """ + + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) + """ + Last sync job ID + """ + + meta: typing.Optional[typing.Optional[typing.Any]] = None + prefix: typing.Optional[str] = pydantic.Field(default=None) + """ + GCS bucket prefix + """ + + project: int = pydantic.Field() + """ + A unique integer value identifying this project. + """ + + regex_filter: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage regex for filtering objects + """ + + status: typing.Optional[StatusC5AEnum] = None + synchronizable: typing.Optional[bool] = None + title: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage title + """ + + traceback: typing.Optional[str] = pydantic.Field(default=None) + """ + Traceback report for the last failed sync + """ + + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) + """ + Interpret objects as BLOBs and generate URLs + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/gcswif_import_storage.py b/src/label_studio_sdk/types/gcswif_import_storage.py new file mode 100644 index 000000000..99d3204e1 --- /dev/null +++ b/src/label_studio_sdk/types/gcswif_import_storage.py @@ -0,0 +1,120 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from ..core.unchecked_base_model import UncheckedBaseModel +import typing +import pydantic +import datetime as dt +from .status_c5a_enum import StatusC5AEnum +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class GcswifImportStorage(UncheckedBaseModel): + bucket: typing.Optional[str] = pydantic.Field(default=None) + """ + GCS bucket name + """ + + created_at: dt.datetime = pydantic.Field() + """ + Creation time + """ + + description: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage description + """ + + google_application_credentials: typing.Optional[str] = pydantic.Field(default=None) + """ + The content of GOOGLE_APPLICATION_CREDENTIALS json file + """ + + google_project_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Google project ID + """ + + google_project_number: typing.Optional[str] = pydantic.Field(default=None) + """ + Google project number + """ + + google_service_account_email: typing.Optional[str] = pydantic.Field(default=None) + """ + Google service account email + """ + + google_wif_pool_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Google WIF pool ID + """ + + google_wif_provider_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Google WIF provider ID + """ + + id: int + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) + """ + Last sync finished time + """ + + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) + """ + Count of tasks synced last time + """ + + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) + """ + Last sync job ID + """ + + meta: typing.Optional[typing.Optional[typing.Any]] = None + prefix: typing.Optional[str] = pydantic.Field(default=None) + """ + GCS bucket prefix + """ + + presign: typing.Optional[bool] = None + presign_ttl: typing.Optional[int] = pydantic.Field(default=None) + """ + Presigned URLs TTL (in minutes) + """ + + project: int = pydantic.Field() + """ + A unique integer value identifying this project. + """ + + regex_filter: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage regex for filtering objects + """ + + status: typing.Optional[StatusC5AEnum] = None + synchronizable: typing.Optional[bool] = None + title: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage title + """ + + traceback: typing.Optional[str] = pydantic.Field(default=None) + """ + Traceback report for the last failed sync + """ + + type: str + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) + """ + Interpret objects as BLOBs and generate URLs + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/label_studio_sdk/types/gcswif_import_storage_request.py b/src/label_studio_sdk/types/gcswif_import_storage_request.py new file mode 100644 index 000000000..1517240f3 --- /dev/null +++ b/src/label_studio_sdk/types/gcswif_import_storage_request.py @@ -0,0 +1,113 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from ..core.unchecked_base_model import UncheckedBaseModel +import typing +import pydantic +import datetime as dt +from .status_c5a_enum import StatusC5AEnum +from ..core.pydantic_utilities import IS_PYDANTIC_V2 + + +class GcswifImportStorageRequest(UncheckedBaseModel): + bucket: typing.Optional[str] = pydantic.Field(default=None) + """ + GCS bucket name + """ + + description: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage description + """ + + google_application_credentials: typing.Optional[str] = pydantic.Field(default=None) + """ + The content of GOOGLE_APPLICATION_CREDENTIALS json file + """ + + google_project_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Google project ID + """ + + google_project_number: typing.Optional[str] = pydantic.Field(default=None) + """ + Google project number + """ + + google_service_account_email: typing.Optional[str] = pydantic.Field(default=None) + """ + Google service account email + """ + + google_wif_pool_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Google WIF pool ID + """ + + google_wif_provider_id: typing.Optional[str] = pydantic.Field(default=None) + """ + Google WIF provider ID + """ + + last_sync: typing.Optional[dt.datetime] = pydantic.Field(default=None) + """ + Last sync finished time + """ + + last_sync_count: typing.Optional[int] = pydantic.Field(default=None) + """ + Count of tasks synced last time + """ + + last_sync_job: typing.Optional[str] = pydantic.Field(default=None) + """ + Last sync job ID + """ + + meta: typing.Optional[typing.Optional[typing.Any]] = None + prefix: typing.Optional[str] = pydantic.Field(default=None) + """ + GCS bucket prefix + """ + + presign: typing.Optional[bool] = None + presign_ttl: typing.Optional[int] = pydantic.Field(default=None) + """ + Presigned URLs TTL (in minutes) + """ + + project: int = pydantic.Field() + """ + A unique integer value identifying this project. + """ + + regex_filter: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage regex for filtering objects + """ + + status: typing.Optional[StatusC5AEnum] = None + synchronizable: typing.Optional[bool] = None + title: typing.Optional[str] = pydantic.Field(default=None) + """ + Cloud storage title + """ + + traceback: typing.Optional[str] = pydantic.Field(default=None) + """ + Traceback report for the last failed sync + """ + + use_blob_urls: typing.Optional[bool] = pydantic.Field(default=None) + """ + Interpret objects as BLOBs and generate URLs + """ + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/tests/export_storage/test_azure_spi.py b/tests/export_storage/test_azure_spi.py new file mode 100644 index 000000000..29c899c60 --- /dev/null +++ b/tests/export_storage/test_azure_spi.py @@ -0,0 +1,329 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "account_name": "account_name", + "can_delete_objects": True, + "client_id": "client_id", + "client_secret": "client_secret", + "container": "container", + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "tenant_id": "tenant_id", + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + "user_delegation_key": "user_delegation_key", + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "account_name": None, + "can_delete_objects": None, + "client_id": None, + "client_secret": None, + "container": None, + "created_at": "datetime", + "description": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "tenant_id": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + "user_delegation_key": None, + } + }, + ) + response = client.export_storage.azure_spi.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.azure_spi.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "account_name": "account_name", + "can_delete_objects": True, + "client_id": "client_id", + "client_secret": "client_secret", + "container": "container", + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "tenant_id": "tenant_id", + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + "user_delegation_key": "user_delegation_key", + } + expected_types: typing.Any = { + "account_name": None, + "can_delete_objects": None, + "client_id": None, + "client_secret": None, + "container": None, + "created_at": "datetime", + "description": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "tenant_id": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + "user_delegation_key": None, + } + response = client.export_storage.azure_spi.create(project=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.azure_spi.create(project=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> 
None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.export_storage.azure_spi.validate(project=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.azure_spi.validate(project=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "account_name": "account_name", + "can_delete_objects": True, + "client_id": "client_id", + "client_secret": "client_secret", + "container": "container", + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "tenant_id": "tenant_id", + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + "user_delegation_key": "user_delegation_key", + } + expected_types: typing.Any = { + "account_name": None, + "can_delete_objects": None, + "client_id": None, + "client_secret": None, + "container": None, + "created_at": "datetime", + "description": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "tenant_id": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + "user_delegation_key": None, + } + response = client.export_storage.azure_spi.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.azure_spi.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.export_storage.azure_spi.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.azure_spi.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "account_name": "account_name", + "can_delete_objects": True, + "client_id": "client_id", + "client_secret": "client_secret", + "container": "container", + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "tenant_id": "tenant_id", + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + "user_delegation_key": "user_delegation_key", + } + expected_types: typing.Any = { + "account_name": None, + "can_delete_objects": None, + "client_id": None, + "client_secret": None, + "container": None, + "created_at": "datetime", + "description": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "project": "integer", + "regex_filter": None, + 
"status": None, + "synchronizable": None, + "tenant_id": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + "user_delegation_key": None, + } + response = client.export_storage.azure_spi.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.azure_spi.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "account_name": "account_name", + "can_delete_objects": True, + "client_id": "client_id", + "client_secret": "client_secret", + "container": "container", + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "tenant_id": "tenant_id", + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + "user_delegation_key": "user_delegation_key", + } + expected_types: typing.Any = { + "account_name": None, + "can_delete_objects": None, + "client_id": None, + "client_secret": None, + "container": None, + "created_at": "datetime", + "description": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "tenant_id": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + "user_delegation_key": None, + } + response = client.export_storage.azure_spi.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.azure_spi.sync(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/export_storage/test_gcswif.py b/tests/export_storage/test_gcswif.py new file mode 100644 index 000000000..eab72e819 --- /dev/null +++ b/tests/export_storage/test_gcswif.py @@ -0,0 +1,339 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "bucket": "bucket", + "can_delete_objects": True, + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "google_project_number": "google_project_number", + "google_service_account_email": "google_service_account_email", + "google_wif_pool_id": "google_wif_pool_id", + "google_wif_provider_id": "google_wif_provider_id", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "bucket": None, + "can_delete_objects": None, + "created_at": "datetime", + "description": None, + "google_application_credentials": None, + "google_project_id": None, + "google_project_number": None, + "google_service_account_email": None, + "google_wif_pool_id": None, + "google_wif_provider_id": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + } + }, + ) + response = client.export_storage.gcswif.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.gcswif.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "bucket": "bucket", + "can_delete_objects": True, + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "google_project_number": "google_project_number", + "google_service_account_email": "google_service_account_email", + "google_wif_pool_id": "google_wif_pool_id", + "google_wif_provider_id": "google_wif_provider_id", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + } + expected_types: typing.Any = { + "bucket": None, + "can_delete_objects": None, + "created_at": "datetime", + "description": None, + "google_application_credentials": None, + "google_project_id": None, + "google_project_number": None, + "google_service_account_email": None, + "google_wif_pool_id": None, + "google_wif_provider_id": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "project": "integer", + "regex_filter": None, + "status": None, + 
"synchronizable": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + } + response = client.export_storage.gcswif.create(project=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.gcswif.create(project=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.export_storage.gcswif.validate(project=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.gcswif.validate(project=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "bucket": "bucket", + "can_delete_objects": True, + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "google_project_number": "google_project_number", + "google_service_account_email": "google_service_account_email", + "google_wif_pool_id": "google_wif_pool_id", + "google_wif_provider_id": "google_wif_provider_id", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + } + expected_types: typing.Any = { + "bucket": None, + "can_delete_objects": None, + "created_at": "datetime", + "description": None, + "google_application_credentials": None, + "google_project_id": None, + "google_project_number": None, + "google_service_account_email": None, + "google_wif_pool_id": None, + "google_wif_provider_id": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + } + response = client.export_storage.gcswif.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.gcswif.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.export_storage.gcswif.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.export_storage.gcswif.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "bucket": "bucket", + "can_delete_objects": True, + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "google_project_number": "google_project_number", + "google_service_account_email": "google_service_account_email", + 
"google_wif_pool_id": "google_wif_pool_id", + "google_wif_provider_id": "google_wif_provider_id", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + } + expected_types: typing.Any = { + "bucket": None, + "can_delete_objects": None, + "created_at": "datetime", + "description": None, + "google_application_credentials": None, + "google_project_id": None, + "google_project_number": None, + "google_service_account_email": None, + "google_wif_pool_id": None, + "google_wif_provider_id": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + } + response = client.export_storage.gcswif.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.gcswif.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "bucket": "bucket", + "can_delete_objects": True, + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "google_project_number": "google_project_number", + "google_service_account_email": "google_service_account_email", + "google_wif_pool_id": "google_wif_pool_id", + "google_wif_provider_id": "google_wif_provider_id", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + } + expected_types: typing.Any = { + "bucket": None, + "can_delete_objects": None, + "created_at": "datetime", + "description": None, + "google_application_credentials": None, + "google_project_id": None, + "google_project_number": None, + "google_service_account_email": None, + "google_wif_pool_id": None, + "google_wif_provider_id": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + } + response = client.export_storage.gcswif.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.export_storage.gcswif.sync(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_azure_spi.py b/tests/import_storage/test_azure_spi.py new file mode 100644 index 000000000..0a98ba502 --- /dev/null +++ b/tests/import_storage/test_azure_spi.py @@ -0,0 +1,339 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "account_name": "account_name", + "client_id": "client_id", + "client_secret": "client_secret", + "container": "container", + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "presign": True, + "presign_ttl": 1, + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "tenant_id": "tenant_id", + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + "user_delegation_key": "user_delegation_key", + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "account_name": None, + "client_id": None, + "client_secret": None, + "container": None, + "created_at": "datetime", + "description": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "presign": None, + "presign_ttl": "integer", + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "tenant_id": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + "user_delegation_key": None, + } + }, + ) + response = client.import_storage.azure_spi.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.azure_spi.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "account_name": "account_name", + "client_id": "client_id", + "client_secret": "client_secret", + "container": "container", + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "presign": True, + "presign_ttl": 1, + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "tenant_id": "tenant_id", + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + "user_delegation_key": "user_delegation_key", + } + expected_types: typing.Any = { + "account_name": None, + "client_id": None, + "client_secret": None, + "container": None, + "created_at": "datetime", + "description": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "presign": None, + "presign_ttl": "integer", + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "tenant_id": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + "user_delegation_key": None, + } + response = client.import_storage.azure_spi.create(project=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.azure_spi.create(project=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: 
LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.azure_spi.validate(project=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.azure_spi.validate(project=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "account_name": "account_name", + "client_id": "client_id", + "client_secret": "client_secret", + "container": "container", + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "presign": True, + "presign_ttl": 1, + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "tenant_id": "tenant_id", + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + "user_delegation_key": "user_delegation_key", + } + expected_types: typing.Any = { + "account_name": None, + "client_id": None, + "client_secret": None, + "container": None, + "created_at": "datetime", + "description": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "presign": None, + "presign_ttl": "integer", + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "tenant_id": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + "user_delegation_key": None, + } + response = client.import_storage.azure_spi.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.azure_spi.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.azure_spi.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.azure_spi.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "account_name": "account_name", + "client_id": "client_id", + "client_secret": "client_secret", + "container": "container", + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "presign": True, + "presign_ttl": 1, + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "tenant_id": "tenant_id", + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + "user_delegation_key": "user_delegation_key", + } + expected_types: typing.Any = { + "account_name": None, + "client_id": None, + "client_secret": None, + "container": None, + "created_at": "datetime", + "description": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": 
None, + "presign": None, + "presign_ttl": "integer", + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "tenant_id": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + "user_delegation_key": None, + } + response = client.import_storage.azure_spi.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.azure_spi.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "account_name": "account_name", + "client_id": "client_id", + "client_secret": "client_secret", + "container": "container", + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "presign": True, + "presign_ttl": 1, + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "tenant_id": "tenant_id", + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + "user_delegation_key": "user_delegation_key", + } + expected_types: typing.Any = { + "account_name": None, + "client_id": None, + "client_secret": None, + "container": None, + "created_at": "datetime", + "description": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "presign": None, + "presign_ttl": "integer", + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "tenant_id": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + "user_delegation_key": None, + } + response = client.import_storage.azure_spi.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.azure_spi.sync(id=1) + validate_response(async_response, expected_response, expected_types) diff --git a/tests/import_storage/test_gcswif.py b/tests/import_storage/test_gcswif.py new file mode 100644 index 000000000..530f7b291 --- /dev/null +++ b/tests/import_storage/test_gcswif.py @@ -0,0 +1,349 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from label_studio_sdk import LabelStudio +from label_studio_sdk import AsyncLabelStudio +import typing +from ..utilities import validate_response + + +async def test_list_(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = [ + { + "bucket": "bucket", + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "google_project_number": "google_project_number", + "google_service_account_email": "google_service_account_email", + "google_wif_pool_id": "google_wif_pool_id", + "google_wif_provider_id": "google_wif_provider_id", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "presign": True, + "presign_ttl": 1, + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + } + ] + expected_types: typing.Tuple[typing.Any, typing.Any] = ( + "list", + { + 0: { + "bucket": None, + "created_at": "datetime", + "description": None, + "google_application_credentials": None, + "google_project_id": None, + "google_project_number": None, + "google_service_account_email": None, + "google_wif_pool_id": None, + "google_wif_provider_id": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "presign": None, + "presign_ttl": "integer", + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + } + }, + ) + response = client.import_storage.gcswif.list() + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.gcswif.list() + validate_response(async_response, expected_response, expected_types) + + +async def test_create(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "bucket": "bucket", + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "google_project_number": "google_project_number", + "google_service_account_email": "google_service_account_email", + "google_wif_pool_id": "google_wif_pool_id", + "google_wif_provider_id": "google_wif_provider_id", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "presign": True, + "presign_ttl": 1, + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + } + expected_types: typing.Any = { + "bucket": None, + "created_at": "datetime", + "description": None, + "google_application_credentials": None, + "google_project_id": None, + "google_project_number": None, + "google_service_account_email": None, + "google_wif_pool_id": None, + "google_wif_provider_id": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "presign": None, + "presign_ttl": "integer", + "project": "integer", + 
"regex_filter": None, + "status": None, + "synchronizable": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + } + response = client.import_storage.gcswif.create(project=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.gcswif.create(project=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_validate(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.gcswif.validate(project=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.gcswif.validate(project=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_get(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "bucket": "bucket", + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "google_project_number": "google_project_number", + "google_service_account_email": "google_service_account_email", + "google_wif_pool_id": "google_wif_pool_id", + "google_wif_provider_id": "google_wif_provider_id", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "presign": True, + "presign_ttl": 1, + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + } + expected_types: typing.Any = { + "bucket": None, + "created_at": "datetime", + "description": None, + "google_application_credentials": None, + "google_project_id": None, + "google_project_number": None, + "google_service_account_email": None, + "google_wif_pool_id": None, + "google_wif_provider_id": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "presign": None, + "presign_ttl": "integer", + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + } + response = client.import_storage.gcswif.get(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.gcswif.get(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_delete(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + # Type ignore to avoid mypy complaining about the function not being meant to return a value + assert ( + client.import_storage.gcswif.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + assert ( + await async_client.import_storage.gcswif.delete(id=1) # type: ignore[func-returns-value] + is None + ) + + +async def test_update(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "bucket": "bucket", + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "google_project_number": "google_project_number", + "google_service_account_email": 
"google_service_account_email", + "google_wif_pool_id": "google_wif_pool_id", + "google_wif_provider_id": "google_wif_provider_id", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "presign": True, + "presign_ttl": 1, + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + } + expected_types: typing.Any = { + "bucket": None, + "created_at": "datetime", + "description": None, + "google_application_credentials": None, + "google_project_id": None, + "google_project_number": None, + "google_service_account_email": None, + "google_wif_pool_id": None, + "google_wif_provider_id": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "presign": None, + "presign_ttl": "integer", + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + } + response = client.import_storage.gcswif.update(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.gcswif.update(id=1) + validate_response(async_response, expected_response, expected_types) + + +async def test_sync(client: LabelStudio, async_client: AsyncLabelStudio) -> None: + expected_response: typing.Any = { + "bucket": "bucket", + "created_at": "2024-01-15T09:30:00Z", + "description": "description", + "google_application_credentials": "google_application_credentials", + "google_project_id": "google_project_id", + "google_project_number": "google_project_number", + "google_service_account_email": "google_service_account_email", + "google_wif_pool_id": "google_wif_pool_id", + "google_wif_provider_id": "google_wif_provider_id", + "id": 1, + "last_sync": "2024-01-15T09:30:00Z", + "last_sync_count": 1, + "last_sync_job": "last_sync_job", + "meta": {"key": "value"}, + "prefix": "prefix", + "presign": True, + "presign_ttl": 1, + "project": 1, + "regex_filter": "regex_filter", + "status": "initialized", + "synchronizable": True, + "title": "title", + "traceback": "traceback", + "type": "type", + "use_blob_urls": True, + } + expected_types: typing.Any = { + "bucket": None, + "created_at": "datetime", + "description": None, + "google_application_credentials": None, + "google_project_id": None, + "google_project_number": None, + "google_service_account_email": None, + "google_wif_pool_id": None, + "google_wif_provider_id": None, + "id": "integer", + "last_sync": "datetime", + "last_sync_count": "integer", + "last_sync_job": None, + "meta": None, + "prefix": None, + "presign": None, + "presign_ttl": "integer", + "project": "integer", + "regex_filter": None, + "status": None, + "synchronizable": None, + "title": None, + "traceback": None, + "type": None, + "use_blob_urls": None, + } + response = client.import_storage.gcswif.sync(id=1) + validate_response(response, expected_response, expected_types) + + async_response = await async_client.import_storage.gcswif.sync(id=1) + validate_response(async_response, expected_response, expected_types)