cirro_api_client.v1.models
Contains all the data models used in inputs/outputs
1"""Contains all the data models used in inputs/outputs""" 2 3from .agent import Agent 4from .agent_detail import AgentDetail 5from .agent_detail_environment_configuration import AgentDetailEnvironmentConfiguration 6from .agent_detail_tags import AgentDetailTags 7from .agent_input import AgentInput 8from .agent_input_configuration_options_schema import AgentInputConfigurationOptionsSchema 9from .agent_input_environment_configuration import AgentInputEnvironmentConfiguration 10from .agent_input_tags import AgentInputTags 11from .agent_registration import AgentRegistration 12from .agent_status import AgentStatus 13from .agent_tags import AgentTags 14from .allowed_data_type import AllowedDataType 15from .approve_project_access_request import ApproveProjectAccessRequest 16from .artifact import Artifact 17from .artifact_type import ArtifactType 18from .audit_event import AuditEvent 19from .audit_event_changes import AuditEventChanges 20from .audit_event_event_detail import AuditEventEventDetail 21from .auth_info import AuthInfo 22from .aws_credentials import AWSCredentials 23from .billing_account import BillingAccount 24from .billing_account_request import BillingAccountRequest 25from .billing_method import BillingMethod 26from .budget_period import BudgetPeriod 27from .calculate_pipeline_cost_request import CalculatePipelineCostRequest 28from .classification_input import ClassificationInput 29from .cloud_account import CloudAccount 30from .cloud_account_type import CloudAccountType 31from .column_definition import ColumnDefinition 32from .compute_environment_configuration import ComputeEnvironmentConfiguration 33from .compute_environment_configuration_input import ComputeEnvironmentConfigurationInput 34from .compute_environment_configuration_input_properties import ComputeEnvironmentConfigurationInputProperties 35from .compute_environment_configuration_properties import ComputeEnvironmentConfigurationProperties 36from .contact import Contact 37from .contact_input 
import ContactInput 38from .cost_response import CostResponse 39from .create_notebook_instance_request import CreateNotebookInstanceRequest 40from .create_project_access_request import CreateProjectAccessRequest 41from .create_reference_request import CreateReferenceRequest 42from .create_response import CreateResponse 43from .custom_pipeline_settings import CustomPipelineSettings 44from .custom_process_input import CustomProcessInput 45from .customer_type import CustomerType 46from .dashboard import Dashboard 47from .dashboard_dashboard_data import DashboardDashboardData 48from .dashboard_info import DashboardInfo 49from .dashboard_request import DashboardRequest 50from .dashboard_request_dashboard_data import DashboardRequestDashboardData 51from .dashboard_request_info import DashboardRequestInfo 52from .data_file import DataFile 53from .data_file_metadata import DataFileMetadata 54from .dataset import Dataset 55from .dataset_assets_manifest import DatasetAssetsManifest 56from .dataset_condition import DatasetCondition 57from .dataset_condition_field import DatasetConditionField 58from .dataset_detail import DatasetDetail 59from .dataset_detail_info import DatasetDetailInfo 60from .dataset_detail_params import DatasetDetailParams 61from .dataset_detail_source_sample_files_map import DatasetDetailSourceSampleFilesMap 62from .dataset_viz import DatasetViz 63from .dataset_viz_config import DatasetVizConfig 64from .discussion import Discussion 65from .discussion_input import DiscussionInput 66from .discussion_type import DiscussionType 67from .entity import Entity 68from .entity_type import EntityType 69from .environment_type import EnvironmentType 70from .error_message import ErrorMessage 71from .executor import Executor 72from .feature_flags import FeatureFlags 73from .file_entry import FileEntry 74from .file_entry_metadata import FileEntryMetadata 75from .file_mapping_rule import FileMappingRule 76from .file_name_match import FileNameMatch 77from 
.file_name_pattern import FileNamePattern 78from .file_requirements import FileRequirements 79from .form_schema import FormSchema 80from .form_schema_form import FormSchemaForm 81from .form_schema_ui import FormSchemaUi 82from .fulfillment_response import FulfillmentResponse 83from .generate_sftp_credentials_request import GenerateSftpCredentialsRequest 84from .get_execution_logs_response import GetExecutionLogsResponse 85from .get_project_summary_response_200 import GetProjectSummaryResponse200 86from .governance_access_type import GovernanceAccessType 87from .governance_classification import GovernanceClassification 88from .governance_contact import GovernanceContact 89from .governance_expiry import GovernanceExpiry 90from .governance_expiry_type import GovernanceExpiryType 91from .governance_file import GovernanceFile 92from .governance_file_access_request import GovernanceFileAccessRequest 93from .governance_file_input import GovernanceFileInput 94from .governance_file_type import GovernanceFileType 95from .governance_requirement import GovernanceRequirement 96from .governance_requirement_project_file_map import GovernanceRequirementProjectFileMap 97from .governance_scope import GovernanceScope 98from .governance_training_verification import GovernanceTrainingVerification 99from .governance_type import GovernanceType 100from .group_cost import GroupCost 101from .import_data_request import ImportDataRequest 102from .import_data_request_download_method import ImportDataRequestDownloadMethod 103from .invite_user_request import InviteUserRequest 104from .invite_user_response import InviteUserResponse 105from .list_events_entity_type import ListEventsEntityType 106from .log_entry import LogEntry 107from .login_provider import LoginProvider 108from .message import Message 109from .message_input import MessageInput 110from .message_type import MessageType 111from .metric_record import MetricRecord 112from .metric_record_services import MetricRecordServices 113from 
.mounted_dataset import MountedDataset 114from .move_dataset_input import MoveDatasetInput 115from .move_dataset_response import MoveDatasetResponse 116from .named_item import NamedItem 117from .notebook_instance import NotebookInstance 118from .notebook_instance_status_response import NotebookInstanceStatusResponse 119from .open_notebook_instance_response import OpenNotebookInstanceResponse 120from .paginated_response_dataset_list_dto import PaginatedResponseDatasetListDto 121from .paginated_response_discussion import PaginatedResponseDiscussion 122from .paginated_response_message import PaginatedResponseMessage 123from .paginated_response_sample_dto import PaginatedResponseSampleDto 124from .paginated_response_user_dto import PaginatedResponseUserDto 125from .pipeline_code import PipelineCode 126from .pipeline_cost import PipelineCost 127from .portal_error_response import PortalErrorResponse 128from .postpone_workspace_autostop_input import PostponeWorkspaceAutostopInput 129from .process import Process 130from .process_detail import ProcessDetail 131from .process_documentation import ProcessDocumentation 132from .project import Project 133from .project_access_request import ProjectAccessRequest 134from .project_access_type import ProjectAccessType 135from .project_create_options import ProjectCreateOptions 136from .project_detail import ProjectDetail 137from .project_file_access_request import ProjectFileAccessRequest 138from .project_input import ProjectInput 139from .project_metrics import ProjectMetrics 140from .project_request import ProjectRequest 141from .project_requirement import ProjectRequirement 142from .project_role import ProjectRole 143from .project_settings import ProjectSettings 144from .project_user import ProjectUser 145from .reference import Reference 146from .reference_type import ReferenceType 147from .reference_type_validation_item import ReferenceTypeValidationItem 148from .repository_type import RepositoryType 149from .request_status 
import RequestStatus 150from .requirement_fulfillment_input import RequirementFulfillmentInput 151from .requirement_input import RequirementInput 152from .resources_info import ResourcesInfo 153from .run_analysis_request import RunAnalysisRequest 154from .run_analysis_request_params import RunAnalysisRequestParams 155from .run_analysis_request_source_sample_files_map import RunAnalysisRequestSourceSampleFilesMap 156from .sample import Sample 157from .sample_metadata import SampleMetadata 158from .sample_request import SampleRequest 159from .sample_request_metadata import SampleRequestMetadata 160from .sample_sheets import SampleSheets 161from .service_connection import ServiceConnection 162from .set_user_project_role_request import SetUserProjectRoleRequest 163from .sftp_credentials import SftpCredentials 164from .share import Share 165from .share_detail import ShareDetail 166from .share_input import ShareInput 167from .share_type import ShareType 168from .sharing_type import SharingType 169from .sort_order import SortOrder 170from .status import Status 171from .stop_execution_response import StopExecutionResponse 172from .sync_status import SyncStatus 173from .system_info_response import SystemInfoResponse 174from .table import Table 175from .tag import Tag 176from .task import Task 177from .task_cost import TaskCost 178from .tenant_info import TenantInfo 179from .update_dataset_request import UpdateDatasetRequest 180from .update_user_request import UpdateUserRequest 181from .upload_dataset_create_response import UploadDatasetCreateResponse 182from .upload_dataset_request import UploadDatasetRequest 183from .user import User 184from .user_detail import UserDetail 185from .user_project_assignment import UserProjectAssignment 186from .user_settings import UserSettings 187from .validate_file_name_patterns_request import ValidateFileNamePatternsRequest 188from .validate_file_requirements_request import ValidateFileRequirementsRequest 189from .version_specification 
import VersionSpecification 190from .workspace import Workspace 191from .workspace_compute_config import WorkspaceComputeConfig 192from .workspace_compute_config_environment_variables import WorkspaceComputeConfigEnvironmentVariables 193from .workspace_connection_response import WorkspaceConnectionResponse 194from .workspace_environment import WorkspaceEnvironment 195from .workspace_input import WorkspaceInput 196from .workspace_session import WorkspaceSession 197 198__all__ = ( 199 "Agent", 200 "AgentDetail", 201 "AgentDetailEnvironmentConfiguration", 202 "AgentDetailTags", 203 "AgentInput", 204 "AgentInputConfigurationOptionsSchema", 205 "AgentInputEnvironmentConfiguration", 206 "AgentInputTags", 207 "AgentRegistration", 208 "AgentStatus", 209 "AgentTags", 210 "AllowedDataType", 211 "ApproveProjectAccessRequest", 212 "Artifact", 213 "ArtifactType", 214 "AuditEvent", 215 "AuditEventChanges", 216 "AuditEventEventDetail", 217 "AuthInfo", 218 "AWSCredentials", 219 "BillingAccount", 220 "BillingAccountRequest", 221 "BillingMethod", 222 "BudgetPeriod", 223 "CalculatePipelineCostRequest", 224 "ClassificationInput", 225 "CloudAccount", 226 "CloudAccountType", 227 "ColumnDefinition", 228 "ComputeEnvironmentConfiguration", 229 "ComputeEnvironmentConfigurationInput", 230 "ComputeEnvironmentConfigurationInputProperties", 231 "ComputeEnvironmentConfigurationProperties", 232 "Contact", 233 "ContactInput", 234 "CostResponse", 235 "CreateNotebookInstanceRequest", 236 "CreateProjectAccessRequest", 237 "CreateReferenceRequest", 238 "CreateResponse", 239 "CustomerType", 240 "CustomPipelineSettings", 241 "CustomProcessInput", 242 "Dashboard", 243 "DashboardDashboardData", 244 "DashboardInfo", 245 "DashboardRequest", 246 "DashboardRequestDashboardData", 247 "DashboardRequestInfo", 248 "DataFile", 249 "DataFileMetadata", 250 "Dataset", 251 "DatasetAssetsManifest", 252 "DatasetCondition", 253 "DatasetConditionField", 254 "DatasetDetail", 255 "DatasetDetailInfo", 256 
"DatasetDetailParams", 257 "DatasetDetailSourceSampleFilesMap", 258 "DatasetViz", 259 "DatasetVizConfig", 260 "Discussion", 261 "DiscussionInput", 262 "DiscussionType", 263 "Entity", 264 "EntityType", 265 "EnvironmentType", 266 "ErrorMessage", 267 "Executor", 268 "FeatureFlags", 269 "FileEntry", 270 "FileEntryMetadata", 271 "FileMappingRule", 272 "FileNameMatch", 273 "FileNamePattern", 274 "FileRequirements", 275 "FormSchema", 276 "FormSchemaForm", 277 "FormSchemaUi", 278 "FulfillmentResponse", 279 "GenerateSftpCredentialsRequest", 280 "GetExecutionLogsResponse", 281 "GetProjectSummaryResponse200", 282 "GovernanceAccessType", 283 "GovernanceClassification", 284 "GovernanceContact", 285 "GovernanceExpiry", 286 "GovernanceExpiryType", 287 "GovernanceFile", 288 "GovernanceFileAccessRequest", 289 "GovernanceFileInput", 290 "GovernanceFileType", 291 "GovernanceRequirement", 292 "GovernanceRequirementProjectFileMap", 293 "GovernanceScope", 294 "GovernanceTrainingVerification", 295 "GovernanceType", 296 "GroupCost", 297 "ImportDataRequest", 298 "ImportDataRequestDownloadMethod", 299 "InviteUserRequest", 300 "InviteUserResponse", 301 "ListEventsEntityType", 302 "LogEntry", 303 "LoginProvider", 304 "Message", 305 "MessageInput", 306 "MessageType", 307 "MetricRecord", 308 "MetricRecordServices", 309 "MountedDataset", 310 "MoveDatasetInput", 311 "MoveDatasetResponse", 312 "NamedItem", 313 "NotebookInstance", 314 "NotebookInstanceStatusResponse", 315 "OpenNotebookInstanceResponse", 316 "PaginatedResponseDatasetListDto", 317 "PaginatedResponseDiscussion", 318 "PaginatedResponseMessage", 319 "PaginatedResponseSampleDto", 320 "PaginatedResponseUserDto", 321 "PipelineCode", 322 "PipelineCost", 323 "PortalErrorResponse", 324 "PostponeWorkspaceAutostopInput", 325 "Process", 326 "ProcessDetail", 327 "ProcessDocumentation", 328 "Project", 329 "ProjectAccessRequest", 330 "ProjectAccessType", 331 "ProjectCreateOptions", 332 "ProjectDetail", 333 "ProjectFileAccessRequest", 334 
"ProjectInput", 335 "ProjectMetrics", 336 "ProjectRequest", 337 "ProjectRequirement", 338 "ProjectRole", 339 "ProjectSettings", 340 "ProjectUser", 341 "Reference", 342 "ReferenceType", 343 "ReferenceTypeValidationItem", 344 "RepositoryType", 345 "RequestStatus", 346 "RequirementFulfillmentInput", 347 "RequirementInput", 348 "ResourcesInfo", 349 "RunAnalysisRequest", 350 "RunAnalysisRequestParams", 351 "RunAnalysisRequestSourceSampleFilesMap", 352 "Sample", 353 "SampleMetadata", 354 "SampleRequest", 355 "SampleRequestMetadata", 356 "SampleSheets", 357 "ServiceConnection", 358 "SetUserProjectRoleRequest", 359 "SftpCredentials", 360 "Share", 361 "ShareDetail", 362 "ShareInput", 363 "ShareType", 364 "SharingType", 365 "SortOrder", 366 "Status", 367 "StopExecutionResponse", 368 "SyncStatus", 369 "SystemInfoResponse", 370 "Table", 371 "Tag", 372 "Task", 373 "TaskCost", 374 "TenantInfo", 375 "UpdateDatasetRequest", 376 "UpdateUserRequest", 377 "UploadDatasetCreateResponse", 378 "UploadDatasetRequest", 379 "User", 380 "UserDetail", 381 "UserProjectAssignment", 382 "UserSettings", 383 "ValidateFileNamePatternsRequest", 384 "ValidateFileRequirementsRequest", 385 "VersionSpecification", 386 "Workspace", 387 "WorkspaceComputeConfig", 388 "WorkspaceComputeConfigEnvironmentVariables", 389 "WorkspaceConnectionResponse", 390 "WorkspaceEnvironment", 391 "WorkspaceInput", 392 "WorkspaceSession", 393)
@_attrs_define
class Agent:
    """Details of the agent

    Attributes:
        status (AgentStatus): The status of the agent
        id (str | Unset): The unique ID of the agent
        name (str | Unset): The display name of the agent
        tags (AgentTags | Unset): Tags associated with the agent
    """

    status: AgentStatus
    id: str | Unset = UNSET
    name: str | Unset = UNSET
    tags: AgentTags | Unset = UNSET
    # Holds any payload keys not modeled above (populated by from_dict).
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict; optional fields are emitted only when set."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["status"] = self.status.value
        if self.id is not UNSET:
            serialized["id"] = self.id
        if self.name is not UNSET:
            serialized["name"] = self.name
        if not isinstance(self.tags, Unset):
            serialized["tags"] = self.tags.to_dict()
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an Agent from a plain dict; leftover keys go to additional_properties."""
        # Local import avoids circular imports between generated model modules.
        from ..models.agent_tags import AgentTags

        data = dict(src_dict)
        parsed_status = AgentStatus(data.pop("status"))
        parsed_id = data.pop("id", UNSET)
        parsed_name = data.pop("name", UNSET)

        raw_tags = data.pop("tags", UNSET)
        parsed_tags: AgentTags | Unset
        parsed_tags = UNSET if isinstance(raw_tags, Unset) else AgentTags.from_dict(raw_tags)

        agent = cls(
            status=parsed_status,
            id=parsed_id,
            name=parsed_name,
            tags=parsed_tags,
        )
        agent.additional_properties = data
        return agent

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties held by this instance."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Details of the agent
Attributes:
- status (AgentStatus): The status of the agent
- id (str | Unset): The unique ID of the agent
- name (str | Unset): The display name of the agent
- tags (AgentTags | Unset): Tags associated with the agent
# attrs-generated __init__ for Agent, rendered here for documentation.
# `attr_dict` and `__attr_factory_additional_properties` are closure cells
# supplied by attrs when it synthesizes this method; they are not module-level names.
def __init__(self, status, id=attr_dict['id'].default, name=attr_dict['name'].default, tags=attr_dict['tags'].default):
    self.status = status
    self.id = id
    self.name = name
    self.tags = tags
    # Starts as an empty dict; from_dict() replaces it with any unmodeled payload keys.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Agent.
def to_dict(self) -> dict[str, Any]:
    """Serialize the agent to a plain dict, omitting unset optional fields."""
    serialized: dict[str, Any] = dict(self.additional_properties)
    # "status" is the only required field; it always serializes via its enum value.
    serialized["status"] = self.status.value
    if self.id is not UNSET:
        serialized["id"] = self.id
    if self.name is not UNSET:
        serialized["name"] = self.name
    if not isinstance(self.tags, Unset):
        serialized["tags"] = self.tags.to_dict()
    return serialized
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build an Agent from a plain dict; leftover keys land in additional_properties."""
    # Local import avoids circular imports between generated model modules.
    from ..models.agent_tags import AgentTags

    data = dict(src_dict)
    parsed_status = AgentStatus(data.pop("status"))
    parsed_id = data.pop("id", UNSET)
    parsed_name = data.pop("name", UNSET)

    raw_tags = data.pop("tags", UNSET)
    parsed_tags: AgentTags | Unset
    parsed_tags = UNSET if isinstance(raw_tags, Unset) else AgentTags.from_dict(raw_tags)

    agent = cls(
        status=parsed_status,
        id=parsed_id,
        name=parsed_name,
        tags=parsed_tags,
    )
    agent.additional_properties = data
    return agent
@_attrs_define
class AgentDetail:
    """Detailed agent record.

    Attributes:
        id (str):
        name (str):
        agent_role_arn (str):
        status (AgentStatus): The status of the agent
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
        registration (AgentRegistration | None | Unset):
        tags (AgentDetailTags | None | Unset):
        environment_configuration (AgentDetailEnvironmentConfiguration | None | Unset):
    """

    id: str
    name: str
    agent_role_arn: str
    status: AgentStatus
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    registration: AgentRegistration | None | Unset = UNSET
    tags: AgentDetailTags | None | Unset = UNSET
    environment_configuration: AgentDetailEnvironmentConfiguration | None | Unset = UNSET
    # Holds any payload keys not modeled above (populated by from_dict).
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict (camelCase keys); unset optional fields are omitted."""
        # Local imports avoid circular imports between generated model modules.
        from ..models.agent_detail_environment_configuration import AgentDetailEnvironmentConfiguration
        from ..models.agent_detail_tags import AgentDetailTags
        from ..models.agent_registration import AgentRegistration

        def _dump_union(value: Any, model_cls: type) -> Any:
            # UNSET passes through, model instances serialize, None/raw values pass as-is.
            if isinstance(value, Unset):
                return UNSET
            if isinstance(value, model_cls):
                return value.to_dict()
            return value

        result: dict[str, Any] = dict(self.additional_properties)
        result.update(
            {
                "id": self.id,
                "name": self.name,
                "agentRoleArn": self.agent_role_arn,
                "status": self.status.value,
                "createdBy": self.created_by,
                "createdAt": self.created_at.isoformat(),
                "updatedAt": self.updated_at.isoformat(),
            }
        )
        optional_fields = (
            ("registration", _dump_union(self.registration, AgentRegistration)),
            ("tags", _dump_union(self.tags, AgentDetailTags)),
            (
                "environmentConfiguration",
                _dump_union(self.environment_configuration, AgentDetailEnvironmentConfiguration),
            ),
        )
        for key, value in optional_fields:
            if value is not UNSET:
                result[key] = value
        return result

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an AgentDetail from a plain dict; leftover keys land in additional_properties."""
        from ..models.agent_detail_environment_configuration import AgentDetailEnvironmentConfiguration
        from ..models.agent_detail_tags import AgentDetailTags
        from ..models.agent_registration import AgentRegistration

        payload = dict(src_dict)

        def _load_union(raw: object, model_cls: Any) -> Any:
            # None/UNSET pass through; dicts are tried as the model; on failure
            # (or any other type) the raw value is returned unchanged.
            if raw is None or isinstance(raw, Unset):
                return raw
            try:
                if not isinstance(raw, dict):
                    raise TypeError()
                return model_cls.from_dict(raw)
            except (TypeError, ValueError, AttributeError, KeyError):
                return raw

        agent_detail = cls(
            id=payload.pop("id"),
            name=payload.pop("name"),
            agent_role_arn=payload.pop("agentRoleArn"),
            status=AgentStatus(payload.pop("status")),
            created_by=payload.pop("createdBy"),
            created_at=isoparse(payload.pop("createdAt")),
            updated_at=isoparse(payload.pop("updatedAt")),
            registration=_load_union(payload.pop("registration", UNSET), AgentRegistration),
            tags=_load_union(payload.pop("tags", UNSET), AgentDetailTags),
            environment_configuration=_load_union(
                payload.pop("environmentConfiguration", UNSET), AgentDetailEnvironmentConfiguration
            ),
        )
        agent_detail.additional_properties = payload
        return agent_detail

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties held by this instance."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- agent_role_arn (str):
- status (AgentStatus): The status of the agent
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
- registration (AgentRegistration | None | Unset):
- tags (AgentDetailTags | None | Unset):
- environment_configuration (AgentDetailEnvironmentConfiguration | None | Unset):
# attrs-generated __init__ for AgentDetail, rendered here for documentation.
# `attr_dict` and `__attr_factory_additional_properties` are closure cells
# supplied by attrs when it synthesizes this method; they are not module-level names.
def __init__(self, id, name, agent_role_arn, status, created_by, created_at, updated_at, registration=attr_dict['registration'].default, tags=attr_dict['tags'].default, environment_configuration=attr_dict['environment_configuration'].default):
    self.id = id
    self.name = name
    self.agent_role_arn = agent_role_arn
    self.status = status
    self.created_by = created_by
    self.created_at = created_at
    self.updated_at = updated_at
    self.registration = registration
    self.tags = tags
    self.environment_configuration = environment_configuration
    # Starts as an empty dict; from_dict() replaces it with any unmodeled payload keys.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class AgentDetail.
def to_dict(self) -> dict[str, Any]:
    """Serialize to a JSON-ready dict (camelCase keys); unset optional fields are omitted."""
    # Local imports avoid circular imports between generated model modules.
    from ..models.agent_detail_environment_configuration import AgentDetailEnvironmentConfiguration
    from ..models.agent_detail_tags import AgentDetailTags
    from ..models.agent_registration import AgentRegistration

    def _dump_union(value: Any, model_cls: type) -> Any:
        # UNSET passes through, model instances serialize, None/raw values pass as-is.
        if isinstance(value, Unset):
            return UNSET
        if isinstance(value, model_cls):
            return value.to_dict()
        return value

    result: dict[str, Any] = dict(self.additional_properties)
    result.update(
        {
            "id": self.id,
            "name": self.name,
            "agentRoleArn": self.agent_role_arn,
            "status": self.status.value,
            "createdBy": self.created_by,
            "createdAt": self.created_at.isoformat(),
            "updatedAt": self.updated_at.isoformat(),
        }
    )
    optional_fields = (
        ("registration", _dump_union(self.registration, AgentRegistration)),
        ("tags", _dump_union(self.tags, AgentDetailTags)),
        (
            "environmentConfiguration",
            _dump_union(self.environment_configuration, AgentDetailEnvironmentConfiguration),
        ),
    )
    for key, value in optional_fields:
        if value is not UNSET:
            result[key] = value
    return result
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build an AgentDetail from a plain dict; leftover keys land in additional_properties."""
    # Local imports avoid circular imports between generated model modules.
    from ..models.agent_detail_environment_configuration import AgentDetailEnvironmentConfiguration
    from ..models.agent_detail_tags import AgentDetailTags
    from ..models.agent_registration import AgentRegistration

    payload = dict(src_dict)

    def _load_union(raw: object, model_cls: Any) -> Any:
        # None/UNSET pass through; dicts are tried as the model; on failure
        # (or any other type) the raw value is returned unchanged.
        if raw is None or isinstance(raw, Unset):
            return raw
        try:
            if not isinstance(raw, dict):
                raise TypeError()
            return model_cls.from_dict(raw)
        except (TypeError, ValueError, AttributeError, KeyError):
            return raw

    agent_detail = cls(
        id=payload.pop("id"),
        name=payload.pop("name"),
        agent_role_arn=payload.pop("agentRoleArn"),
        status=AgentStatus(payload.pop("status")),
        created_by=payload.pop("createdBy"),
        created_at=isoparse(payload.pop("createdAt")),
        updated_at=isoparse(payload.pop("updatedAt")),
        registration=_load_union(payload.pop("registration", UNSET), AgentRegistration),
        tags=_load_union(payload.pop("tags", UNSET), AgentDetailTags),
        environment_configuration=_load_union(
            payload.pop("environmentConfiguration", UNSET), AgentDetailEnvironmentConfiguration
        ),
    )
    agent_detail.additional_properties = payload
    return agent_detail
@_attrs_define
class AgentDetailEnvironmentConfiguration:
    """Free-form string-to-string map of environment configuration entries."""

    # Every key/value pair lives here; there are no modeled fields.
    additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of the stored properties."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance holding every key of ``src_dict``."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> str:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: str) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Method generated by attrs for class AgentDetailEnvironmentConfiguration.
@_attrs_define
class AgentDetailTags:
    """Free-form string-to-string map of tags on an agent detail record."""

    # Every key/value pair lives here; there are no modeled fields.
    additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of the stored tags."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance holding every key of ``src_dict``."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored tags."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> str:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: str) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@_attrs_define
class AgentInput:
    """
    Attributes:
        name (str): The display name of the agent
        agent_role_arn (str): Arn of the AWS IAM role or user that the agent will use (JSONSchema format)
        id (None | str | Unset): The unique ID of the agent (required on create)
        configuration_options_schema (AgentInputConfigurationOptionsSchema | None | Unset): The configuration options
            available for the agent
        environment_configuration (AgentInputEnvironmentConfiguration | None | Unset): The environment configuration for
            the agent Example: {'PARTITION': 'restart'}.
        tags (AgentInputTags | None | Unset): The tags associated with the agent displayed to the user Example:
            {'Support Email': 'it@company.com'}.
    """

    name: str
    agent_role_arn: str
    id: None | str | Unset = UNSET
    configuration_options_schema: AgentInputConfigurationOptionsSchema | None | Unset = UNSET
    environment_configuration: AgentInputEnvironmentConfiguration | None | Unset = UNSET
    tags: AgentInputTags | None | Unset = UNSET
    # Catch-all for keys not declared in the OpenAPI schema.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; fields left UNSET are omitted."""
        # Imported locally; generated models use deferred imports between siblings.
        from ..models.agent_input_configuration_options_schema import AgentInputConfigurationOptionsSchema
        from ..models.agent_input_environment_configuration import AgentInputEnvironmentConfiguration
        from ..models.agent_input_tags import AgentInputTags

        name = self.name

        agent_role_arn = self.agent_role_arn

        id: None | str | Unset
        if isinstance(self.id, Unset):
            id = UNSET
        else:
            id = self.id

        configuration_options_schema: dict[str, Any] | None | Unset
        if isinstance(self.configuration_options_schema, Unset):
            configuration_options_schema = UNSET
        elif isinstance(self.configuration_options_schema, AgentInputConfigurationOptionsSchema):
            # Nested model: serialize recursively.
            configuration_options_schema = self.configuration_options_schema.to_dict()
        else:
            # None (or a raw passthrough value) is emitted as-is.
            configuration_options_schema = self.configuration_options_schema

        environment_configuration: dict[str, Any] | None | Unset
        if isinstance(self.environment_configuration, Unset):
            environment_configuration = UNSET
        elif isinstance(self.environment_configuration, AgentInputEnvironmentConfiguration):
            environment_configuration = self.environment_configuration.to_dict()
        else:
            environment_configuration = self.environment_configuration

        tags: dict[str, Any] | None | Unset
        if isinstance(self.tags, Unset):
            tags = UNSET
        elif isinstance(self.tags, AgentInputTags):
            tags = self.tags.to_dict()
        else:
            tags = self.tags

        field_dict: dict[str, Any] = {}
        # Additional properties go in first so declared fields win on key collision.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "name": name,
                "agentRoleArn": agent_role_arn,
            }
        )
        if id is not UNSET:
            field_dict["id"] = id
        if configuration_options_schema is not UNSET:
            field_dict["configurationOptionsSchema"] = configuration_options_schema
        if environment_configuration is not UNSET:
            field_dict["environmentConfiguration"] = environment_configuration
        if tags is not UNSET:
            field_dict["tags"] = tags

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a JSON-style dict; unrecognized keys become additional properties."""
        from ..models.agent_input_configuration_options_schema import AgentInputConfigurationOptionsSchema
        from ..models.agent_input_environment_configuration import AgentInputEnvironmentConfiguration
        from ..models.agent_input_tags import AgentInputTags

        d = dict(src_dict)
        name = d.pop("name")

        agent_role_arn = d.pop("agentRoleArn")

        def _parse_id(data: object) -> None | str | Unset:
            # None and Unset pass through unchanged.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        id = _parse_id(d.pop("id", UNSET))

        def _parse_configuration_options_schema(data: object) -> AgentInputConfigurationOptionsSchema | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                configuration_options_schema_type_0 = AgentInputConfigurationOptionsSchema.from_dict(data)

                return configuration_options_schema_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                # Not a valid nested model: fall through and return the raw value.
                pass
            return cast(AgentInputConfigurationOptionsSchema | None | Unset, data)

        configuration_options_schema = _parse_configuration_options_schema(d.pop("configurationOptionsSchema", UNSET))

        def _parse_environment_configuration(data: object) -> AgentInputEnvironmentConfiguration | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                environment_configuration_type_0 = AgentInputEnvironmentConfiguration.from_dict(data)

                return environment_configuration_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(AgentInputEnvironmentConfiguration | None | Unset, data)

        environment_configuration = _parse_environment_configuration(d.pop("environmentConfiguration", UNSET))

        def _parse_tags(data: object) -> AgentInputTags | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                tags_type_0 = AgentInputTags.from_dict(data)

                return tags_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(AgentInputTags | None | Unset, data)

        tags = _parse_tags(d.pop("tags", UNSET))

        agent_input = cls(
            name=name,
            agent_role_arn=agent_role_arn,
            id=id,
            configuration_options_schema=configuration_options_schema,
            environment_configuration=environment_configuration,
            tags=tags,
        )

        # Whatever remains after the declared keys were popped.
        agent_input.additional_properties = d
        return agent_input

    @property
    def additional_keys(self) -> list[str]:
        """Names of all extra properties currently stored."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str): The display name of the agent
- agent_role_arn (str): Arn of the AWS IAM role or user that the agent will use (JSONSchema format)
- id (None | str | Unset): The unique ID of the agent (required on create)
- configuration_options_schema (AgentInputConfigurationOptionsSchema | None | Unset): The configuration options available for the agent
- environment_configuration (AgentInputEnvironmentConfiguration | None | Unset): The environment configuration for the agent Example: {'PARTITION': 'restart'}.
- tags (AgentInputTags | None | Unset): The tags associated with the agent displayed to the user Example: {'Support Email': 'it@company.com'}.
# NOTE(review): pydoc-rendered duplicate of the attrs-generated AgentInput.__init__.
# `attr_dict` and `__attr_factory_additional_properties` exist only inside attrs'
# generated code, not at module scope — this fragment is documentation residue.
def __init__(self, name, agent_role_arn, id=attr_dict['id'].default, configuration_options_schema=attr_dict['configuration_options_schema'].default, environment_configuration=attr_dict['environment_configuration'].default, tags=attr_dict['tags'].default):
    self.name = name
    self.agent_role_arn = agent_role_arn
    self.id = id
    self.configuration_options_schema = configuration_options_schema
    self.environment_configuration = environment_configuration
    self.tags = tags
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class AgentInput.
# NOTE(review): pydoc-rendered duplicate of AgentInput.to_dict (already defined on the class).
def to_dict(self) -> dict[str, Any]:
    """Serialize to a JSON-ready dict; fields left UNSET are omitted."""
    from ..models.agent_input_configuration_options_schema import AgentInputConfigurationOptionsSchema
    from ..models.agent_input_environment_configuration import AgentInputEnvironmentConfiguration
    from ..models.agent_input_tags import AgentInputTags

    name = self.name

    agent_role_arn = self.agent_role_arn

    id: None | str | Unset
    if isinstance(self.id, Unset):
        id = UNSET
    else:
        id = self.id

    configuration_options_schema: dict[str, Any] | None | Unset
    if isinstance(self.configuration_options_schema, Unset):
        configuration_options_schema = UNSET
    elif isinstance(self.configuration_options_schema, AgentInputConfigurationOptionsSchema):
        # Nested model: serialize recursively.
        configuration_options_schema = self.configuration_options_schema.to_dict()
    else:
        configuration_options_schema = self.configuration_options_schema

    environment_configuration: dict[str, Any] | None | Unset
    if isinstance(self.environment_configuration, Unset):
        environment_configuration = UNSET
    elif isinstance(self.environment_configuration, AgentInputEnvironmentConfiguration):
        environment_configuration = self.environment_configuration.to_dict()
    else:
        environment_configuration = self.environment_configuration

    tags: dict[str, Any] | None | Unset
    if isinstance(self.tags, Unset):
        tags = UNSET
    elif isinstance(self.tags, AgentInputTags):
        tags = self.tags.to_dict()
    else:
        tags = self.tags

    field_dict: dict[str, Any] = {}
    # Additional properties first so declared fields win on key collision.
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "name": name,
            "agentRoleArn": agent_role_arn,
        }
    )
    if id is not UNSET:
        field_dict["id"] = id
    if configuration_options_schema is not UNSET:
        field_dict["configurationOptionsSchema"] = configuration_options_schema
    if environment_configuration is not UNSET:
        field_dict["environmentConfiguration"] = environment_configuration
    if tags is not UNSET:
        field_dict["tags"] = tags

    return field_dict
# NOTE(review): pydoc-rendered duplicate of AgentInput.from_dict (already defined on the class).
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Deserialize from a JSON-style dict; unrecognized keys become additional properties."""
    from ..models.agent_input_configuration_options_schema import AgentInputConfigurationOptionsSchema
    from ..models.agent_input_environment_configuration import AgentInputEnvironmentConfiguration
    from ..models.agent_input_tags import AgentInputTags

    d = dict(src_dict)
    name = d.pop("name")

    agent_role_arn = d.pop("agentRoleArn")

    def _parse_id(data: object) -> None | str | Unset:
        # None and Unset pass through unchanged.
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(None | str | Unset, data)

    id = _parse_id(d.pop("id", UNSET))

    def _parse_configuration_options_schema(data: object) -> AgentInputConfigurationOptionsSchema | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, dict):
                raise TypeError()
            configuration_options_schema_type_0 = AgentInputConfigurationOptionsSchema.from_dict(data)

            return configuration_options_schema_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            # Not a valid nested model: fall through and return the raw value.
            pass
        return cast(AgentInputConfigurationOptionsSchema | None | Unset, data)

    configuration_options_schema = _parse_configuration_options_schema(d.pop("configurationOptionsSchema", UNSET))

    def _parse_environment_configuration(data: object) -> AgentInputEnvironmentConfiguration | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, dict):
                raise TypeError()
            environment_configuration_type_0 = AgentInputEnvironmentConfiguration.from_dict(data)

            return environment_configuration_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(AgentInputEnvironmentConfiguration | None | Unset, data)

    environment_configuration = _parse_environment_configuration(d.pop("environmentConfiguration", UNSET))

    def _parse_tags(data: object) -> AgentInputTags | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, dict):
                raise TypeError()
            tags_type_0 = AgentInputTags.from_dict(data)

            return tags_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(AgentInputTags | None | Unset, data)

    tags = _parse_tags(d.pop("tags", UNSET))

    agent_input = cls(
        name=name,
        agent_role_arn=agent_role_arn,
        id=id,
        configuration_options_schema=configuration_options_schema,
        environment_configuration=environment_configuration,
        tags=tags,
    )

    # Whatever remains after the declared keys were popped.
    agent_input.additional_properties = d
    return agent_input
@_attrs_define
class AgentInputConfigurationOptionsSchema:
    """The configuration options available for the agent"""

    # Schema declares no fixed fields; all options live in this mapping.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict (shallow copy of the stored options)."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance whose options are a copy of ``src_dict``."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored options."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        self.additional_properties.pop(key)

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
The configuration options available for the agent
Method generated by attrs for class AgentInputConfigurationOptionsSchema.
@_attrs_define
class AgentInputEnvironmentConfiguration:
    """The environment configuration for the agent

    Example:
        {'PARTITION': 'restart'}

    """

    # No fixed fields; every environment entry is stored here.
    additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict (shallow copy of the stored entries)."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance whose entries are a copy of ``src_dict``."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored entries."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> str:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: str) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        self.additional_properties.pop(key)

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
The environment configuration for the agent
Example:
{'PARTITION': 'restart'}
Method generated by attrs for class AgentInputEnvironmentConfiguration.
@_attrs_define
class AgentInputTags:
    """The tags associated with the agent displayed to the user

    Example:
        {'Support Email': 'it@company.com'}

    """

    # No fixed fields; every tag is stored here.
    additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict (shallow copy of the stored tags)."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance whose tags are a copy of ``src_dict``."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored tags."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> str:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: str) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        self.additional_properties.pop(key)

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
The tags associated with the agent displayed to the user
Example:
{'Support Email': 'it@company.com'}
@_attrs_define
class AgentRegistration:
    """Connection details reported when an agent registers.

    Attributes:
        local_ip (str):
        remote_ip (str):
        agent_version (str):
        hostname (str):
        os (str):
    """

    local_ip: str
    remote_ip: str
    agent_version: str
    hostname: str
    os: str
    # Catch-all for keys not declared in the OpenAPI schema.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict using camelCase wire keys."""
        # Extra properties go in first so declared fields win on key collision.
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["localIp"] = self.local_ip
        serialized["remoteIp"] = self.remote_ip
        serialized["agentVersion"] = self.agent_version
        serialized["hostname"] = self.hostname
        serialized["os"] = self.os
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-style dict; leftover keys become additional properties."""
        data = dict(src_dict)
        registration = cls(
            local_ip=data.pop("localIp"),
            remote_ip=data.pop("remoteIp"),
            agent_version=data.pop("agentVersion"),
            hostname=data.pop("hostname"),
            os=data.pop("os"),
        )
        registration.additional_properties = data
        return registration

    @property
    def additional_keys(self) -> list[str]:
        """Names of all extra properties currently stored."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        self.additional_properties.pop(key)

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- local_ip (str):
- remote_ip (str):
- agent_version (str):
- hostname (str):
- os (str):
# NOTE(review): pydoc-rendered duplicate of the attrs-generated AgentRegistration.__init__;
# `__attr_factory_additional_properties` exists only inside attrs' generated code.
def __init__(self, local_ip, remote_ip, agent_version, hostname, os):
    self.local_ip = local_ip
    self.remote_ip = remote_ip
    self.agent_version = agent_version
    self.hostname = hostname
    self.os = os
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class AgentRegistration.
# NOTE(review): pydoc-rendered duplicates of AgentRegistration.to_dict / from_dict
# (both already defined on the class above).
def to_dict(self) -> dict[str, Any]:
    local_ip = self.local_ip

    remote_ip = self.remote_ip

    agent_version = self.agent_version

    hostname = self.hostname

    os = self.os

    field_dict: dict[str, Any] = {}
    # Extra properties go in first so declared fields win on key collision.
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "localIp": local_ip,
            "remoteIp": remote_ip,
            "agentVersion": agent_version,
            "hostname": hostname,
            "os": os,
        }
    )

    return field_dict

@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    d = dict(src_dict)
    local_ip = d.pop("localIp")

    remote_ip = d.pop("remoteIp")

    agent_version = d.pop("agentVersion")

    hostname = d.pop("hostname")

    os = d.pop("os")

    agent_registration = cls(
        local_ip=local_ip,
        remote_ip=remote_ip,
        agent_version=agent_version,
        hostname=hostname,
        os=os,
    )

    # Whatever remains after the declared keys were popped.
    agent_registration.additional_properties = d
    return agent_registration
class AgentStatus(str, Enum):
    """Connectivity status reported for an agent.

    Unrecognized wire values deserialize to ``UNKNOWN`` instead of raising
    ``ValueError`` (see ``_missing_``), so new server-side statuses do not
    break older clients.
    """

    OFFLINE = "OFFLINE"
    ONLINE = "ONLINE"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Present the raw wire value rather than "AgentStatus.ONLINE".
        return str(self.value)

    @classmethod
    def _missing_(cls, value: object) -> "AgentStatus":
        # Return the member directly: the original `cls(cls.UNKNOWN)` re-entered
        # the Enum constructor for a value already known to be a member.
        return cls.UNKNOWN
str(object='') -> str
str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class AgentTags:
    """Tags associated with the agent"""

    # No fixed fields; every tag is stored here.
    additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict (shallow copy of the stored tags)."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance whose tags are a copy of ``src_dict``."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored tags."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> str:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: str) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        self.additional_properties.pop(key)

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Tags associated with the agent
@_attrs_define
class AllowedDataType:
    """A data type together with the file-name patterns permitted for it.

    Attributes:
        description (str):
        error_msg (str):
        allowed_patterns (list[FileNamePattern]):
    """

    description: str
    error_msg: str
    allowed_patterns: list[FileNamePattern]
    # Catch-all for keys not declared in the OpenAPI schema.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict using camelCase wire keys."""
        # Extra properties go in first so declared fields win on key collision.
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["description"] = self.description
        serialized["errorMsg"] = self.error_msg
        serialized["allowedPatterns"] = [pattern.to_dict() for pattern in self.allowed_patterns]
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-style dict; leftover keys become additional properties."""
        from ..models.file_name_pattern import FileNamePattern

        data = dict(src_dict)
        instance = cls(
            description=data.pop("description"),
            error_msg=data.pop("errorMsg"),
            allowed_patterns=[FileNamePattern.from_dict(item) for item in data.pop("allowedPatterns")],
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all extra properties currently stored."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        self.additional_properties.pop(key)

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- description (str):
- error_msg (str):
- allowed_patterns (list[FileNamePattern]):
# NOTE(review): pydoc-rendered duplicate of the attrs-generated AllowedDataType.__init__;
# `__attr_factory_additional_properties` exists only inside attrs' generated code.
def __init__(self, description, error_msg, allowed_patterns):
    self.description = description
    self.error_msg = error_msg
    self.allowed_patterns = allowed_patterns
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class AllowedDataType.
# NOTE(review): pydoc-rendered duplicates of AllowedDataType.to_dict / from_dict
# (both already defined on the class above).
def to_dict(self) -> dict[str, Any]:
    description = self.description

    error_msg = self.error_msg

    allowed_patterns = []
    for allowed_patterns_item_data in self.allowed_patterns:
        allowed_patterns_item = allowed_patterns_item_data.to_dict()
        allowed_patterns.append(allowed_patterns_item)

    field_dict: dict[str, Any] = {}
    # Extra properties go in first so declared fields win on key collision.
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "description": description,
            "errorMsg": error_msg,
            "allowedPatterns": allowed_patterns,
        }
    )

    return field_dict

@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    from ..models.file_name_pattern import FileNamePattern

    d = dict(src_dict)
    description = d.pop("description")

    error_msg = d.pop("errorMsg")

    allowed_patterns = []
    _allowed_patterns = d.pop("allowedPatterns")
    for allowed_patterns_item_data in _allowed_patterns:
        allowed_patterns_item = FileNamePattern.from_dict(allowed_patterns_item_data)

        allowed_patterns.append(allowed_patterns_item)

    allowed_data_type = cls(
        description=description,
        error_msg=error_msg,
        allowed_patterns=allowed_patterns,
    )

    # Whatever remains after the declared keys were popped.
    allowed_data_type.additional_properties = d
    return allowed_data_type
@_attrs_define
class ApproveProjectAccessRequest:
    """Request body for approving a project access request.

    Attributes:
        role (ProjectRole):
    """

    role: ProjectRole
    # Catch-all for keys not declared in the OpenAPI schema.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict (the enum is emitted as its value)."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["role"] = self.role.value
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-style dict; leftover keys become additional properties."""
        data = dict(src_dict)
        request = cls(role=ProjectRole(data.pop("role")))
        request.additional_properties = data
        return request

    @property
    def additional_keys(self) -> list[str]:
        """Names of all extra properties currently stored."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        self.additional_properties.pop(key)

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- role (ProjectRole):
# NOTE(review): pydoc-rendered duplicate of the attrs-generated
# ApproveProjectAccessRequest.__init__; `__attr_factory_additional_properties`
# exists only inside attrs' generated code.
def __init__(self, role):
    self.role = role
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ApproveProjectAccessRequest.
# NOTE(review): pydoc-rendered duplicate of ApproveProjectAccessRequest.from_dict
# (already defined on the class above).
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    d = dict(src_dict)
    role = ProjectRole(d.pop("role"))

    approve_project_access_request = cls(
        role=role,
    )

    # Whatever remains after the declared keys were popped.
    approve_project_access_request.additional_properties = d
    return approve_project_access_request
@_attrs_define
class Artifact:
    """A secondary file or resource associated with a dataset

    Attributes:
        type_ (ArtifactType):
        path (str):
    """

    type_: ArtifactType
    path: str
    # Catch-all for keys not declared in the OpenAPI schema.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict (wire key "type"; enum emitted as its value)."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["type"] = self.type_.value
        serialized["path"] = self.path
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-style dict; leftover keys become additional properties."""
        data = dict(src_dict)
        artifact = cls(
            type_=ArtifactType(data.pop("type")),
            path=data.pop("path"),
        )
        artifact.additional_properties = data
        return artifact

    @property
    def additional_keys(self) -> list[str]:
        """Names of all extra properties currently stored."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        self.additional_properties.pop(key)

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
A secondary file or resource associated with a dataset
Attributes:
- type_ (ArtifactType):
- path (str):
# NOTE(review): pydoc-rendered duplicate of the attrs-generated Artifact.__init__;
# `__attr_factory_additional_properties` exists only inside attrs' generated code.
def __init__(self, type_, path):
    self.type_ = type_
    self.path = path
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Artifact.
class ArtifactType(str, Enum):
    """Kind of artifact attached to a dataset.

    Unrecognized wire values deserialize to ``UNKNOWN`` instead of raising
    ``ValueError`` (see ``_missing_``), so new server-side types do not
    break older clients.
    """

    FILES = "FILES"
    INGEST_MANIFEST = "INGEST_MANIFEST"
    METADATA = "METADATA"
    SAMPLE_SHEET = "SAMPLE_SHEET"
    WORKFLOW_COMPUTE_CONFIG = "WORKFLOW_COMPUTE_CONFIG"
    WORKFLOW_DAG = "WORKFLOW_DAG"
    WORKFLOW_DEBUG_LOGS = "WORKFLOW_DEBUG_LOGS"
    WORKFLOW_LOGS = "WORKFLOW_LOGS"
    WORKFLOW_OPTIONS = "WORKFLOW_OPTIONS"
    WORKFLOW_PARAMETERS = "WORKFLOW_PARAMETERS"
    WORKFLOW_REPORT = "WORKFLOW_REPORT"
    WORKFLOW_TIMELINE = "WORKFLOW_TIMELINE"
    WORKFLOW_TRACE = "WORKFLOW_TRACE"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Present the raw wire value rather than "ArtifactType.FILES".
        return str(self.value)

    @classmethod
    def _missing_(cls, value: object) -> "ArtifactType":
        # Return the member directly: the original `cls(cls.UNKNOWN)` re-entered
        # the Enum constructor for a value already known to be a member.
        return cls.UNKNOWN
str(object='') -> str
str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class AuditEvent:
    """
    Attributes:
        id (str | Unset): The unique identifier for the audit event
        event_type (str | Unset): The type of event Example: CREATE.
        project_id (str | Unset): The project ID associated with the event (if applicable)
        entity_id (str | Unset): The entity ID associated with the event
        entity_type (str | Unset): The entity type associated with the event Example: Project.
        event_detail (AuditEventEventDetail | None | Unset): The details of the event, such as the request details sent
            from the client
        changes (AuditEventChanges | None | Unset): The changes made to the entity (if applicable) Example:
            {'.settings.retentionPolicyDays': '1 -> 2'}.
        username (str | Unset): The username of the user who performed the action Example: admin@cirro.bio.
        ip_address (str | Unset): The IP address of the user who performed the action Example: 0.0.0.0.
        created_at (datetime.datetime | Unset): The date and time the event was created
    """

    id: str | Unset = UNSET
    event_type: str | Unset = UNSET
    project_id: str | Unset = UNSET
    entity_id: str | Unset = UNSET
    entity_type: str | Unset = UNSET
    event_detail: AuditEventEventDetail | None | Unset = UNSET
    changes: AuditEventChanges | None | Unset = UNSET
    username: str | Unset = UNSET
    ip_address: str | Unset = UNSET
    created_at: datetime.datetime | Unset = UNSET
    # Catch-all for keys not declared in the OpenAPI schema.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; fields left UNSET are omitted."""
        # Imported locally; generated models use deferred imports between siblings.
        from ..models.audit_event_changes import AuditEventChanges
        from ..models.audit_event_event_detail import AuditEventEventDetail

        id = self.id

        event_type = self.event_type

        project_id = self.project_id

        entity_id = self.entity_id

        entity_type = self.entity_type

        event_detail: dict[str, Any] | None | Unset
        if isinstance(self.event_detail, Unset):
            event_detail = UNSET
        elif isinstance(self.event_detail, AuditEventEventDetail):
            # Nested model: serialize recursively.
            event_detail = self.event_detail.to_dict()
        else:
            # None (or a raw passthrough value) is emitted as-is.
            event_detail = self.event_detail

        changes: dict[str, Any] | None | Unset
        if isinstance(self.changes, Unset):
            changes = UNSET
        elif isinstance(self.changes, AuditEventChanges):
            changes = self.changes.to_dict()
        else:
            changes = self.changes

        username = self.username

        ip_address = self.ip_address

        created_at: str | Unset = UNSET
        if not isinstance(self.created_at, Unset):
            # Datetimes are serialized as ISO-8601 strings.
            created_at = self.created_at.isoformat()

        field_dict: dict[str, Any] = {}
        # Extra properties go in first so declared fields win on key collision.
        field_dict.update(self.additional_properties)
        field_dict.update({})
        if id is not UNSET:
            field_dict["id"] = id
        if event_type is not UNSET:
            field_dict["eventType"] = event_type
        if project_id is not UNSET:
            field_dict["projectId"] = project_id
        if entity_id is not UNSET:
            field_dict["entityId"] = entity_id
        if entity_type is not UNSET:
            field_dict["entityType"] = entity_type
        if event_detail is not UNSET:
            field_dict["eventDetail"] = event_detail
        if changes is not UNSET:
            field_dict["changes"] = changes
        if username is not UNSET:
            field_dict["username"] = username
        if ip_address is not UNSET:
            field_dict["ipAddress"] = ip_address
        if created_at is not UNSET:
            field_dict["createdAt"] = created_at

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a JSON-style dict; unrecognized keys become additional properties."""
        from ..models.audit_event_changes import AuditEventChanges
        from ..models.audit_event_event_detail import AuditEventEventDetail

        d = dict(src_dict)
        id = d.pop("id", UNSET)

        event_type = d.pop("eventType", UNSET)

        project_id = d.pop("projectId", UNSET)

        entity_id = d.pop("entityId", UNSET)

        entity_type = d.pop("entityType", UNSET)

        def _parse_event_detail(data: object) -> AuditEventEventDetail | None | Unset:
            # None and Unset pass through unchanged.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                event_detail_type_0 = AuditEventEventDetail.from_dict(data)

                return event_detail_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                # Not a valid nested model: fall through and return the raw value.
                pass
            return cast(AuditEventEventDetail | None | Unset, data)

        event_detail = _parse_event_detail(d.pop("eventDetail", UNSET))

        def _parse_changes(data: object) -> AuditEventChanges | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                changes_type_0 = AuditEventChanges.from_dict(data)

                return changes_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(AuditEventChanges | None | Unset, data)

        changes = _parse_changes(d.pop("changes", UNSET))

        username = d.pop("username", UNSET)

        ip_address = d.pop("ipAddress", UNSET)

        _created_at = d.pop("createdAt", UNSET)
        created_at: datetime.datetime | Unset
        if isinstance(_created_at, Unset):
            created_at = UNSET
        else:
            # isoparse (python-dateutil) parses the ISO-8601 timestamp string.
            created_at = isoparse(_created_at)

        audit_event = cls(
            id=id,
            event_type=event_type,
            project_id=project_id,
            entity_id=entity_id,
            entity_type=entity_type,
            event_detail=event_detail,
            changes=changes,
            username=username,
            ip_address=ip_address,
            created_at=created_at,
        )

        # Whatever remains after the declared keys were popped.
        audit_event.additional_properties = d
        return audit_event

    @property
    def additional_keys(self) -> list[str]:
        """Names of all extra properties currently stored."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str | Unset): The unique identifier for the audit event
- event_type (str | Unset): The type of event Example: CREATE.
- project_id (str | Unset): The project ID associated with the event (if applicable)
- entity_id (str | Unset): The entity ID associated with the event
- entity_type (str | Unset): The entity type associated with the event Example: Project.
- event_detail (AuditEventEventDetail | None | Unset): The details of the event, such as the request details sent from the client
- changes (AuditEventChanges | None | Unset): The changes made to the entity (if applicable) Example: {'.settings.retentionPolicyDays': '1 -> 2'}.
- username (str | Unset): The username of the user who performed the action Example: admin@cirro.bio.
- ip_address (str | Unset): The IP address of the user who performed the action Example: 0.0.0.0.
- created_at (datetime.datetime | Unset): The date and time the event was created
# attrs-generated constructor for AuditEvent.
# Each keyword default is pulled from attr_dict[...].default — the attrs-maintained
# map of field definitions — so every field is optional and falls back to its
# declared default (presumably UNSET for this model; confirm against the field
# declarations). `__attr_factory_additional_properties` is the attrs-generated
# factory (a plain dict) backing the additional_properties field.
def __init__(self, id=attr_dict['id'].default, event_type=attr_dict['event_type'].default, project_id=attr_dict['project_id'].default, entity_id=attr_dict['entity_id'].default, entity_type=attr_dict['entity_type'].default, event_detail=attr_dict['event_detail'].default, changes=attr_dict['changes'].default, username=attr_dict['username'].default, ip_address=attr_dict['ip_address'].default, created_at=attr_dict['created_at'].default):
    self.id = id
    self.event_type = event_type
    self.project_id = project_id
    self.entity_id = entity_id
    self.entity_type = entity_type
    self.event_detail = event_detail
    self.changes = changes
    self.username = username
    self.ip_address = ip_address
    self.created_at = created_at
    # Payload keys that are not declared model fields are stored here.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class AuditEvent.
def to_dict(self) -> dict[str, Any]:
    """Serialize this audit event to a JSON-compatible dict.

    Fields left at UNSET are omitted; nested model objects are serialized
    through their own ``to_dict``; extra (undeclared) properties are included.
    """
    from ..models.audit_event_changes import AuditEventChanges
    from ..models.audit_event_event_detail import AuditEventEventDetail

    # Nested objects serialize via their own to_dict(); None/UNSET pass through.
    if isinstance(self.event_detail, Unset):
        serialized_detail: dict[str, Any] | None | Unset = UNSET
    elif isinstance(self.event_detail, AuditEventEventDetail):
        serialized_detail = self.event_detail.to_dict()
    else:
        serialized_detail = self.event_detail

    if isinstance(self.changes, Unset):
        serialized_changes: dict[str, Any] | None | Unset = UNSET
    elif isinstance(self.changes, AuditEventChanges):
        serialized_changes = self.changes.to_dict()
    else:
        serialized_changes = self.changes

    # Datetimes travel as ISO-8601 strings on the wire.
    serialized_created: str | Unset = UNSET
    if not isinstance(self.created_at, Unset):
        serialized_created = self.created_at.isoformat()

    result: dict[str, Any] = dict(self.additional_properties)
    for wire_key, value in (
        ("id", self.id),
        ("eventType", self.event_type),
        ("projectId", self.project_id),
        ("entityId", self.entity_id),
        ("entityType", self.entity_type),
        ("eventDetail", serialized_detail),
        ("changes", serialized_changes),
        ("username", self.username),
        ("ipAddress", self.ip_address),
        ("createdAt", serialized_created),
    ):
        if value is not UNSET:
            result[wire_key] = value

    return result
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build an AuditEvent from a JSON-style mapping.

    Recognized keys are consumed; whatever remains is stored in
    ``additional_properties``. Missing keys default to UNSET.
    """
    from ..models.audit_event_changes import AuditEventChanges
    from ..models.audit_event_event_detail import AuditEventEventDetail

    data = dict(src_dict)

    def _maybe_model(raw: object, model: Any) -> Any:
        # None / UNSET pass straight through; dict payloads are parsed into
        # the model type (falling back to the raw value if parsing fails);
        # anything else is returned unchanged.
        if raw is None or isinstance(raw, Unset):
            return raw
        if isinstance(raw, dict):
            try:
                return model.from_dict(raw)
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
        return raw

    event_detail = _maybe_model(data.pop("eventDetail", UNSET), AuditEventEventDetail)
    changes = _maybe_model(data.pop("changes", UNSET), AuditEventChanges)

    # Timestamps arrive as ISO-8601 strings.
    raw_created = data.pop("createdAt", UNSET)
    created_at: datetime.datetime | Unset
    created_at = UNSET if isinstance(raw_created, Unset) else isoparse(raw_created)

    audit_event = cls(
        id=data.pop("id", UNSET),
        event_type=data.pop("eventType", UNSET),
        project_id=data.pop("projectId", UNSET),
        entity_id=data.pop("entityId", UNSET),
        entity_type=data.pop("entityType", UNSET),
        event_detail=event_detail,
        changes=changes,
        username=data.pop("username", UNSET),
        ip_address=data.pop("ipAddress", UNSET),
        created_at=created_at,
    )

    audit_event.additional_properties = data
    return audit_event
@_attrs_define
class AuditEventChanges:
    """The changes made to the entity (if applicable)

    Example:
        {'.settings.retentionPolicyDays': '1 -> 2'}

    """

    # Free-form string->string map; there are no declared fields on this model.
    additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of the stored key/value pairs."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Create an instance whose properties mirror *src_dict*."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> str:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: str) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
The changes made to the entity (if applicable)
Example:
{'.settings.retentionPolicyDays': '1 -> 2'}
@_attrs_define
class AuditEventEventDetail:
    """The details of the event, such as the request details sent from the client"""

    # Free-form map; there are no declared fields on this model.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of the stored key/value pairs."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Create an instance whose properties mirror *src_dict*."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
The details of the event, such as the request details sent from the client
@_attrs_define
class AuthInfo:
    """
    Attributes:
        user_pool_id (str):
        sdk_app_id (str):
        ui_app_id (str):
        drive_app_id (str):
        endpoint (str):
    """

    user_pool_id: str
    sdk_app_id: str
    ui_app_id: str
    drive_app_id: str
    endpoint: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-compatible dict (extra properties included)."""
        result: dict[str, Any] = dict(self.additional_properties)
        result["userPoolId"] = self.user_pool_id
        result["sdkAppId"] = self.sdk_app_id
        result["uiAppId"] = self.ui_app_id
        result["driveAppId"] = self.drive_app_id
        result["endpoint"] = self.endpoint
        return result

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a JSON-compatible mapping.

        All declared keys are required (KeyError if absent); leftover keys
        are kept in ``additional_properties``.
        """
        data = dict(src_dict)
        auth_info = cls(
            user_pool_id=data.pop("userPoolId"),
            sdk_app_id=data.pop("sdkAppId"),
            ui_app_id=data.pop("uiAppId"),
            drive_app_id=data.pop("driveAppId"),
            endpoint=data.pop("endpoint"),
        )
        auth_info.additional_properties = data
        return auth_info

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- user_pool_id (str):
- sdk_app_id (str):
- ui_app_id (str):
- drive_app_id (str):
- endpoint (str):
# attrs-generated constructor for AuthInfo. All five fields are required
# positional/keyword parameters (no defaults). `__attr_factory_additional_properties`
# is the attrs-generated factory (a plain dict) backing additional_properties.
def __init__(self, user_pool_id, sdk_app_id, ui_app_id, drive_app_id, endpoint):
    self.user_pool_id = user_pool_id
    self.sdk_app_id = sdk_app_id
    self.ui_app_id = ui_app_id
    self.drive_app_id = drive_app_id
    self.endpoint = endpoint
    # Payload keys that are not declared model fields are stored here.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class AuthInfo.
def to_dict(self) -> dict[str, Any]:
    """Serialize to a JSON-compatible dict (extra properties included)."""
    result: dict[str, Any] = dict(self.additional_properties)
    result["userPoolId"] = self.user_pool_id
    result["sdkAppId"] = self.sdk_app_id
    result["uiAppId"] = self.ui_app_id
    result["driveAppId"] = self.drive_app_id
    result["endpoint"] = self.endpoint
    return result
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Deserialize from a JSON-compatible mapping.

    All declared keys are required (KeyError if absent); leftover keys
    are kept in ``additional_properties``.
    """
    data = dict(src_dict)
    auth_info = cls(
        user_pool_id=data.pop("userPoolId"),
        sdk_app_id=data.pop("sdkAppId"),
        ui_app_id=data.pop("uiAppId"),
        drive_app_id=data.pop("driveAppId"),
        endpoint=data.pop("endpoint"),
    )
    auth_info.additional_properties = data
    return auth_info
@_attrs_define
class AWSCredentials:
    """
    Attributes:
        access_key_id (str):
        secret_access_key (str):
        session_token (str):
        expiration (datetime.datetime):
        region (str | Unset): Region of requested resource (i.e., S3 Bucket)
    """

    access_key_id: str
    secret_access_key: str
    session_token: str
    expiration: datetime.datetime
    region: str | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-compatible dict; 'region' is omitted when unset."""
        result: dict[str, Any] = dict(self.additional_properties)
        result["accessKeyId"] = self.access_key_id
        result["secretAccessKey"] = self.secret_access_key
        result["sessionToken"] = self.session_token
        # Datetimes travel as ISO-8601 strings on the wire.
        result["expiration"] = self.expiration.isoformat()
        if self.region is not UNSET:
            result["region"] = self.region
        return result

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a JSON-compatible mapping.

        'region' is optional; leftover keys are kept in ``additional_properties``.
        """
        data = dict(src_dict)
        creds = cls(
            access_key_id=data.pop("accessKeyId"),
            secret_access_key=data.pop("secretAccessKey"),
            session_token=data.pop("sessionToken"),
            expiration=isoparse(data.pop("expiration")),
            region=data.pop("region", UNSET),
        )
        creds.additional_properties = data
        return creds

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- access_key_id (str):
- secret_access_key (str):
- session_token (str):
- expiration (datetime.datetime):
- region (str | Unset): Region of requested resource (i.e., S3 Bucket)
# attrs-generated constructor for AWSCredentials. The first four fields are
# required; `region` defaults to its declared attrs default (presumably UNSET —
# confirm against the field declaration). `__attr_factory_additional_properties`
# is the attrs-generated factory (a plain dict) backing additional_properties.
def __init__(self, access_key_id, secret_access_key, session_token, expiration, region=attr_dict['region'].default):
    self.access_key_id = access_key_id
    self.secret_access_key = secret_access_key
    self.session_token = session_token
    self.expiration = expiration
    self.region = region
    # Payload keys that are not declared model fields are stored here.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class AWSCredentials.
def to_dict(self) -> dict[str, Any]:
    """Serialize to a JSON-compatible dict; 'region' is omitted when unset."""
    result: dict[str, Any] = dict(self.additional_properties)
    result["accessKeyId"] = self.access_key_id
    result["secretAccessKey"] = self.secret_access_key
    result["sessionToken"] = self.session_token
    # Datetimes travel as ISO-8601 strings on the wire.
    result["expiration"] = self.expiration.isoformat()
    if self.region is not UNSET:
        result["region"] = self.region
    return result
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Deserialize from a JSON-compatible mapping.

    'region' is optional; leftover keys are kept in ``additional_properties``.
    """
    data = dict(src_dict)
    creds = cls(
        access_key_id=data.pop("accessKeyId"),
        secret_access_key=data.pop("secretAccessKey"),
        session_token=data.pop("sessionToken"),
        expiration=isoparse(data.pop("expiration")),
        region=data.pop("region", UNSET),
    )
    creds.additional_properties = data
    return creds
@_attrs_define
class BillingAccount:
    """
    Attributes:
        id (str):
        name (str):
        organization (str):
        contacts (list[Contact]):
        customer_type (CustomerType):
        billing_method (BillingMethod):
        primary_budget_number (str):
        owner (str):
        shared_with (list[str]):
        is_archived (bool):
    """

    id: str
    name: str
    organization: str
    contacts: list[Contact]
    customer_type: CustomerType
    billing_method: BillingMethod
    primary_budget_number: str
    owner: str
    shared_with: list[str]
    is_archived: bool
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-compatible dict; all declared fields are emitted."""
        result: dict[str, Any] = dict(self.additional_properties)
        result.update(
            {
                "id": self.id,
                "name": self.name,
                "organization": self.organization,
                # Nested contacts serialize through their own to_dict().
                "contacts": [contact.to_dict() for contact in self.contacts],
                # Enums travel as their raw string values.
                "customerType": self.customer_type.value,
                "billingMethod": self.billing_method.value,
                "primaryBudgetNumber": self.primary_budget_number,
                "owner": self.owner,
                "sharedWith": self.shared_with,
                "isArchived": self.is_archived,
            }
        )
        return result

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a JSON-compatible mapping.

        All declared keys are required; leftover keys are kept in
        ``additional_properties``.
        """
        from ..models.contact import Contact

        data = dict(src_dict)
        account = cls(
            id=data.pop("id"),
            name=data.pop("name"),
            organization=data.pop("organization"),
            contacts=[Contact.from_dict(item) for item in data.pop("contacts")],
            customer_type=CustomerType(data.pop("customerType")),
            billing_method=BillingMethod(data.pop("billingMethod")),
            primary_budget_number=data.pop("primaryBudgetNumber"),
            owner=data.pop("owner"),
            shared_with=cast(list[str], data.pop("sharedWith")),
            is_archived=data.pop("isArchived"),
        )
        account.additional_properties = data
        return account

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- organization (str):
- contacts (list[Contact]):
- customer_type (CustomerType):
- billing_method (BillingMethod):
- primary_budget_number (str):
- owner (str):
- shared_with (list[str]):
- is_archived (bool):
# attrs-generated constructor for BillingAccount. All ten fields are required
# (no defaults). `__attr_factory_additional_properties` is the attrs-generated
# factory (a plain dict) backing additional_properties.
def __init__(self, id, name, organization, contacts, customer_type, billing_method, primary_budget_number, owner, shared_with, is_archived):
    self.id = id
    self.name = name
    self.organization = organization
    self.contacts = contacts
    self.customer_type = customer_type
    self.billing_method = billing_method
    self.primary_budget_number = primary_budget_number
    self.owner = owner
    self.shared_with = shared_with
    self.is_archived = is_archived
    # Payload keys that are not declared model fields are stored here.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class BillingAccount.
def to_dict(self) -> dict[str, Any]:
    """Serialize to a JSON-compatible dict; all declared fields are emitted."""
    result: dict[str, Any] = dict(self.additional_properties)
    result.update(
        {
            "id": self.id,
            "name": self.name,
            "organization": self.organization,
            # Nested contacts serialize through their own to_dict().
            "contacts": [contact.to_dict() for contact in self.contacts],
            # Enums travel as their raw string values.
            "customerType": self.customer_type.value,
            "billingMethod": self.billing_method.value,
            "primaryBudgetNumber": self.primary_budget_number,
            "owner": self.owner,
            "sharedWith": self.shared_with,
            "isArchived": self.is_archived,
        }
    )
    return result
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Deserialize from a JSON-compatible mapping.

    All declared keys are required; leftover keys are kept in
    ``additional_properties``.
    """
    from ..models.contact import Contact

    data = dict(src_dict)
    account = cls(
        id=data.pop("id"),
        name=data.pop("name"),
        organization=data.pop("organization"),
        contacts=[Contact.from_dict(item) for item in data.pop("contacts")],
        customer_type=CustomerType(data.pop("customerType")),
        billing_method=BillingMethod(data.pop("billingMethod")),
        primary_budget_number=data.pop("primaryBudgetNumber"),
        owner=data.pop("owner"),
        shared_with=cast(list[str], data.pop("sharedWith")),
        is_archived=data.pop("isArchived"),
    )
    account.additional_properties = data
    return account
@_attrs_define
class BillingAccountRequest:
    """
    Attributes:
        name (str):
        contacts (list[Contact]):
        customer_type (CustomerType):
        billing_method (BillingMethod):
        primary_budget_number (str):
        owner (str):
        shared_with (list[str]):
    """

    name: str
    contacts: list[Contact]
    customer_type: CustomerType
    billing_method: BillingMethod
    primary_budget_number: str
    owner: str
    shared_with: list[str]
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-compatible dict; all declared fields are emitted."""
        result: dict[str, Any] = dict(self.additional_properties)
        result.update(
            {
                "name": self.name,
                # Nested contacts serialize through their own to_dict().
                "contacts": [contact.to_dict() for contact in self.contacts],
                # Enums travel as their raw string values.
                "customerType": self.customer_type.value,
                "billingMethod": self.billing_method.value,
                "primaryBudgetNumber": self.primary_budget_number,
                "owner": self.owner,
                "sharedWith": self.shared_with,
            }
        )
        return result

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a JSON-compatible mapping.

        All declared keys are required; leftover keys are kept in
        ``additional_properties``.
        """
        from ..models.contact import Contact

        data = dict(src_dict)
        request = cls(
            name=data.pop("name"),
            contacts=[Contact.from_dict(item) for item in data.pop("contacts")],
            customer_type=CustomerType(data.pop("customerType")),
            billing_method=BillingMethod(data.pop("billingMethod")),
            primary_budget_number=data.pop("primaryBudgetNumber"),
            owner=data.pop("owner"),
            shared_with=cast(list[str], data.pop("sharedWith")),
        )
        request.additional_properties = data
        return request

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- contacts (list[Contact]):
- customer_type (CustomerType):
- billing_method (BillingMethod):
- primary_budget_number (str):
- owner (str):
- shared_with (list[str]):
# attrs-generated constructor for BillingAccountRequest. All seven fields are
# required (no defaults). `__attr_factory_additional_properties` is the
# attrs-generated factory (a plain dict) backing additional_properties.
def __init__(self, name, contacts, customer_type, billing_method, primary_budget_number, owner, shared_with):
    self.name = name
    self.contacts = contacts
    self.customer_type = customer_type
    self.billing_method = billing_method
    self.primary_budget_number = primary_budget_number
    self.owner = owner
    self.shared_with = shared_with
    # Payload keys that are not declared model fields are stored here.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class BillingAccountRequest.
def to_dict(self) -> dict[str, Any]:
    """Serialize to a JSON-compatible dict; all declared fields are emitted."""
    result: dict[str, Any] = dict(self.additional_properties)
    result.update(
        {
            "name": self.name,
            # Nested contacts serialize through their own to_dict().
            "contacts": [contact.to_dict() for contact in self.contacts],
            # Enums travel as their raw string values.
            "customerType": self.customer_type.value,
            "billingMethod": self.billing_method.value,
            "primaryBudgetNumber": self.primary_budget_number,
            "owner": self.owner,
            "sharedWith": self.shared_with,
        }
    )
    return result
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Deserialize from a JSON-compatible mapping.

    All declared keys are required; leftover keys are kept in
    ``additional_properties``.
    """
    from ..models.contact import Contact

    data = dict(src_dict)
    request = cls(
        name=data.pop("name"),
        contacts=[Contact.from_dict(item) for item in data.pop("contacts")],
        customer_type=CustomerType(data.pop("customerType")),
        billing_method=BillingMethod(data.pop("billingMethod")),
        primary_budget_number=data.pop("primaryBudgetNumber"),
        owner=data.pop("owner"),
        shared_with=cast(list[str], data.pop("sharedWith")),
    )
    request.additional_properties = data
    return request
class BillingMethod(str, Enum):
    """How a billing account is paid; values match the API wire format."""

    BUDGET_NUMBER = "BUDGET_NUMBER"
    CREDIT = "CREDIT"
    PURCHASE_ORDER = "PURCHASE_ORDER"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Render as the raw wire value, not the default 'BillingMethod.X' repr.
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Enum lookup hook: unrecognized wire values map to UNKNOWN instead of
        # raising ValueError, so new server-side members don't break old clients.
        # (Parameter renamed from the misleading `number` to match the Enum API;
        # return the member directly rather than re-entering lookup via cls(...).)
        return cls.UNKNOWN
str(object='') -> str
str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
class BudgetPeriod(str, Enum):
    """How often a budget resets; values match the API wire format."""

    ANNUALLY = "ANNUALLY"
    MONTHLY = "MONTHLY"
    QUARTERLY = "QUARTERLY"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Render as the raw wire value, not the default 'BudgetPeriod.X' repr.
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Enum lookup hook: unrecognized wire values map to UNKNOWN instead of
        # raising ValueError, so new server-side members don't break old clients.
        # (Parameter renamed from the misleading `number` to match the Enum API;
        # return the member directly rather than re-entering lookup via cls(...).)
        return cls.UNKNOWN
str(object='') -> str
str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class CalculatePipelineCostRequest:
    """ """

    # No declared fields: the entire request body is free-form key/value pairs.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of the stored key/value pairs."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Create an instance whose properties mirror *src_dict*."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@_attrs_define
class ClassificationInput:
    """
    Attributes:
        name (str):
        description (str):
        requirement_ids (list[str]):
    """

    name: str
    description: str
    requirement_ids: list[str]
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-compatible dict (extra properties included)."""
        result: dict[str, Any] = dict(self.additional_properties)
        result["name"] = self.name
        result["description"] = self.description
        result["requirementIds"] = self.requirement_ids
        return result

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a JSON-compatible mapping.

        All declared keys are required; leftover keys are kept in
        ``additional_properties``.
        """
        data = dict(src_dict)
        instance = cls(
            name=data.pop("name"),
            description=data.pop("description"),
            requirement_ids=cast(list[str], data.pop("requirementIds")),
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- description (str):
- requirement_ids (list[str]):
26def __init__(self, name, description, requirement_ids): 27 self.name = name 28 self.description = description 29 self.requirement_ids = requirement_ids 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ClassificationInput.
27 def to_dict(self) -> dict[str, Any]: 28 name = self.name 29 30 description = self.description 31 32 requirement_ids = self.requirement_ids 33 34 field_dict: dict[str, Any] = {} 35 field_dict.update(self.additional_properties) 36 field_dict.update( 37 { 38 "name": name, 39 "description": description, 40 "requirementIds": requirement_ids, 41 } 42 ) 43 44 return field_dict
46 @classmethod 47 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 48 d = dict(src_dict) 49 name = d.pop("name") 50 51 description = d.pop("description") 52 53 requirement_ids = cast(list[str], d.pop("requirementIds")) 54 55 classification_input = cls( 56 name=name, 57 description=description, 58 requirement_ids=requirement_ids, 59 ) 60 61 classification_input.additional_properties = d 62 return classification_input
@_attrs_define
class CloudAccount:
    """
    Attributes:
        account_type (CloudAccountType): Type of cloud account (Hosted by Cirro, or Bring your own account)
        account_id (str | Unset): AWS Account ID
        account_name (str | Unset): Name used to describe the account, useful when the account hosts multiple projects
        region_name (str | Unset): AWS Region Code (defaults to region of Cirro app) Example: us-west-2.
    """

    account_type: CloudAccountType
    account_id: str | Unset = UNSET
    account_name: str | Unset = UNSET
    region_name: str | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a camelCase dict; optional fields left UNSET are omitted."""
        payload: dict[str, Any] = dict(self.additional_properties)
        payload["accountType"] = self.account_type.value
        # Only emit the optional fields that were explicitly set.
        for wire_key, value in (
            ("accountId", self.account_id),
            ("accountName", self.account_name),
            ("regionName", self.region_name),
        ):
            if value is not UNSET:
                payload[wire_key] = value
        return payload

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a mapping; leftover keys become additional properties."""
        data = dict(src_dict)
        instance = cls(
            account_type=CloudAccountType(data.pop("accountType")),
            account_id=data.pop("accountId", UNSET),
            account_name=data.pop("accountName", UNSET),
            region_name=data.pop("regionName", UNSET),
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored additional properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- account_type (CloudAccountType): Type of cloud account (Hosted by Cirro, or Bring your own account)
- account_id (str | Unset): AWS Account ID
- account_name (str | Unset): Name used to describe the account, useful when the account hosts multiple projects
- region_name (str | Unset): AWS Region Code (defaults to region of Cirro app) Example: us-west-2.
27def __init__(self, account_type, account_id=attr_dict['account_id'].default, account_name=attr_dict['account_name'].default, region_name=attr_dict['region_name'].default): 28 self.account_type = account_type 29 self.account_id = account_id 30 self.account_name = account_name 31 self.region_name = region_name 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class CloudAccount.
32 def to_dict(self) -> dict[str, Any]: 33 account_type = self.account_type.value 34 35 account_id = self.account_id 36 37 account_name = self.account_name 38 39 region_name = self.region_name 40 41 field_dict: dict[str, Any] = {} 42 field_dict.update(self.additional_properties) 43 field_dict.update( 44 { 45 "accountType": account_type, 46 } 47 ) 48 if account_id is not UNSET: 49 field_dict["accountId"] = account_id 50 if account_name is not UNSET: 51 field_dict["accountName"] = account_name 52 if region_name is not UNSET: 53 field_dict["regionName"] = region_name 54 55 return field_dict
57 @classmethod 58 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 59 d = dict(src_dict) 60 account_type = CloudAccountType(d.pop("accountType")) 61 62 account_id = d.pop("accountId", UNSET) 63 64 account_name = d.pop("accountName", UNSET) 65 66 region_name = d.pop("regionName", UNSET) 67 68 cloud_account = cls( 69 account_type=account_type, 70 account_id=account_id, 71 account_name=account_name, 72 region_name=region_name, 73 ) 74 75 cloud_account.additional_properties = d 76 return cloud_account
class CloudAccountType(str, Enum):
    """Closed set of cloud-account hosting models."""

    BYOA = "BYOA"
    HOSTED = "HOSTED"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Render as the wire value rather than the default "ClassName.MEMBER".
        return self.value

    @classmethod
    def _missing_(cls, value):
        # Any unrecognized wire value maps to the UNKNOWN sentinel instead of raising.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class ColumnDefinition:
    """
    Attributes:
        col (str | Unset): Column name in asset file
        name (str | Unset): User-friendly column name
        desc (str | Unset): Description of the column
    """

    col: str | Unset = UNSET
    name: str | Unset = UNSET
    desc: str | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a dict; fields left UNSET are omitted."""
        payload: dict[str, Any] = dict(self.additional_properties)
        for wire_key, value in (
            ("col", self.col),
            ("name", self.name),
            ("desc", self.desc),
        ):
            if value is not UNSET:
                payload[wire_key] = value
        return payload

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a mapping; leftover keys become additional properties."""
        data = dict(src_dict)
        instance = cls(
            col=data.pop("col", UNSET),
            name=data.pop("name", UNSET),
            desc=data.pop("desc", UNSET),
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored additional properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- col (str | Unset): Column name in asset file
- name (str | Unset): User-friendly column name
- desc (str | Unset): Description of the column
26def __init__(self, col=attr_dict['col'].default, name=attr_dict['name'].default, desc=attr_dict['desc'].default): 27 self.col = col 28 self.name = name 29 self.desc = desc 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ColumnDefinition.
29 def to_dict(self) -> dict[str, Any]: 30 col = self.col 31 32 name = self.name 33 34 desc = self.desc 35 36 field_dict: dict[str, Any] = {} 37 field_dict.update(self.additional_properties) 38 field_dict.update({}) 39 if col is not UNSET: 40 field_dict["col"] = col 41 if name is not UNSET: 42 field_dict["name"] = name 43 if desc is not UNSET: 44 field_dict["desc"] = desc 45 46 return field_dict
48 @classmethod 49 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 50 d = dict(src_dict) 51 col = d.pop("col", UNSET) 52 53 name = d.pop("name", UNSET) 54 55 desc = d.pop("desc", UNSET) 56 57 column_definition = cls( 58 col=col, 59 name=name, 60 desc=desc, 61 ) 62 63 column_definition.additional_properties = d 64 return column_definition
@_attrs_define
class ComputeEnvironmentConfiguration:
    """
    Attributes:
        environment_type (EnvironmentType): The type of compute environment
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
        id (str | Unset): The unique ID of the environment
        name (str | Unset): The display name of the environment
        properties (ComputeEnvironmentConfigurationProperties | Unset): Configuration properties passed to the
            environment
        agent (Agent | None | Unset):
        created_by (str | Unset): The user who created the environment
    """

    environment_type: EnvironmentType
    created_at: datetime.datetime
    updated_at: datetime.datetime
    id: str | Unset = UNSET
    name: str | Unset = UNSET
    properties: ComputeEnvironmentConfigurationProperties | Unset = UNSET
    agent: Agent | None | Unset = UNSET
    created_by: str | Unset = UNSET
    # Payload keys that are not modeled fields; populated by from_dict.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a camelCase dict; UNSET optional fields are omitted."""
        # Local import — presumably to avoid a circular import between model
        # modules (pattern repeated in from_dict below); confirm against the
        # generator's output conventions.
        from ..models.agent import Agent

        environment_type = self.environment_type.value

        created_at = self.created_at.isoformat()

        updated_at = self.updated_at.isoformat()

        id = self.id

        name = self.name

        # Nested properties object is serialized only when actually set.
        properties: dict[str, Any] | Unset = UNSET
        if not isinstance(self.properties, Unset):
            properties = self.properties.to_dict()

        # agent is a three-state field: UNSET (omit), None (explicit null),
        # or an Agent model (serialize via its own to_dict).
        agent: dict[str, Any] | None | Unset
        if isinstance(self.agent, Unset):
            agent = UNSET
        elif isinstance(self.agent, Agent):
            agent = self.agent.to_dict()
        else:
            agent = self.agent

        created_by = self.created_by

        field_dict: dict[str, Any] = {}
        # Extras first, so modeled fields overwrite clashing keys.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "environmentType": environment_type,
                "createdAt": created_at,
                "updatedAt": updated_at,
            }
        )
        if id is not UNSET:
            field_dict["id"] = id
        if name is not UNSET:
            field_dict["name"] = name
        if properties is not UNSET:
            field_dict["properties"] = properties
        if agent is not UNSET:
            field_dict["agent"] = agent
        if created_by is not UNSET:
            field_dict["createdBy"] = created_by

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; unrecognized keys become additional properties."""
        from ..models.agent import Agent
        from ..models.compute_environment_configuration_properties import ComputeEnvironmentConfigurationProperties

        # Work on a copy so pops do not mutate the caller's mapping.
        d = dict(src_dict)
        environment_type = EnvironmentType(d.pop("environmentType"))

        created_at = isoparse(d.pop("createdAt"))

        updated_at = isoparse(d.pop("updatedAt"))

        id = d.pop("id", UNSET)

        name = d.pop("name", UNSET)

        _properties = d.pop("properties", UNSET)
        properties: ComputeEnvironmentConfigurationProperties | Unset
        if isinstance(_properties, Unset):
            properties = UNSET
        else:
            properties = ComputeEnvironmentConfigurationProperties.from_dict(_properties)

        def _parse_agent(data: object) -> Agent | None | Unset:
            # Pass through explicit null / absent markers unchanged.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                agent_type_1 = Agent.from_dict(data)

                return agent_type_1
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            # Fallback: keep the raw value when it does not parse as an Agent.
            return cast(Agent | None | Unset, data)

        agent = _parse_agent(d.pop("agent", UNSET))

        created_by = d.pop("createdBy", UNSET)

        compute_environment_configuration = cls(
            environment_type=environment_type,
            created_at=created_at,
            updated_at=updated_at,
            id=id,
            name=name,
            properties=properties,
            agent=agent,
            created_by=created_by,
        )

        # Whatever was not popped above is retained as extra properties.
        compute_environment_configuration.additional_properties = d
        return compute_environment_configuration

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored additional properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- environment_type (EnvironmentType): The type of compute environment
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
- id (str | Unset): The unique ID of the environment
- name (str | Unset): The display name of the environment
- properties (ComputeEnvironmentConfigurationProperties | Unset): Configuration properties passed to the environment
- agent (Agent | None | Unset):
- created_by (str | Unset): The user who created the environment
31def __init__(self, environment_type, created_at, updated_at, id=attr_dict['id'].default, name=attr_dict['name'].default, properties=attr_dict['properties'].default, agent=attr_dict['agent'].default, created_by=attr_dict['created_by'].default): 32 self.environment_type = environment_type 33 self.created_at = created_at 34 self.updated_at = updated_at 35 self.id = id 36 self.name = name 37 self.properties = properties 38 self.agent = agent 39 self.created_by = created_by 40 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ComputeEnvironmentConfiguration.
48 def to_dict(self) -> dict[str, Any]: 49 from ..models.agent import Agent 50 51 environment_type = self.environment_type.value 52 53 created_at = self.created_at.isoformat() 54 55 updated_at = self.updated_at.isoformat() 56 57 id = self.id 58 59 name = self.name 60 61 properties: dict[str, Any] | Unset = UNSET 62 if not isinstance(self.properties, Unset): 63 properties = self.properties.to_dict() 64 65 agent: dict[str, Any] | None | Unset 66 if isinstance(self.agent, Unset): 67 agent = UNSET 68 elif isinstance(self.agent, Agent): 69 agent = self.agent.to_dict() 70 else: 71 agent = self.agent 72 73 created_by = self.created_by 74 75 field_dict: dict[str, Any] = {} 76 field_dict.update(self.additional_properties) 77 field_dict.update( 78 { 79 "environmentType": environment_type, 80 "createdAt": created_at, 81 "updatedAt": updated_at, 82 } 83 ) 84 if id is not UNSET: 85 field_dict["id"] = id 86 if name is not UNSET: 87 field_dict["name"] = name 88 if properties is not UNSET: 89 field_dict["properties"] = properties 90 if agent is not UNSET: 91 field_dict["agent"] = agent 92 if created_by is not UNSET: 93 field_dict["createdBy"] = created_by 94 95 return field_dict
97 @classmethod 98 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 99 from ..models.agent import Agent 100 from ..models.compute_environment_configuration_properties import ComputeEnvironmentConfigurationProperties 101 102 d = dict(src_dict) 103 environment_type = EnvironmentType(d.pop("environmentType")) 104 105 created_at = isoparse(d.pop("createdAt")) 106 107 updated_at = isoparse(d.pop("updatedAt")) 108 109 id = d.pop("id", UNSET) 110 111 name = d.pop("name", UNSET) 112 113 _properties = d.pop("properties", UNSET) 114 properties: ComputeEnvironmentConfigurationProperties | Unset 115 if isinstance(_properties, Unset): 116 properties = UNSET 117 else: 118 properties = ComputeEnvironmentConfigurationProperties.from_dict(_properties) 119 120 def _parse_agent(data: object) -> Agent | None | Unset: 121 if data is None: 122 return data 123 if isinstance(data, Unset): 124 return data 125 try: 126 if not isinstance(data, dict): 127 raise TypeError() 128 agent_type_1 = Agent.from_dict(data) 129 130 return agent_type_1 131 except (TypeError, ValueError, AttributeError, KeyError): 132 pass 133 return cast(Agent | None | Unset, data) 134 135 agent = _parse_agent(d.pop("agent", UNSET)) 136 137 created_by = d.pop("createdBy", UNSET) 138 139 compute_environment_configuration = cls( 140 environment_type=environment_type, 141 created_at=created_at, 142 updated_at=updated_at, 143 id=id, 144 name=name, 145 properties=properties, 146 agent=agent, 147 created_by=created_by, 148 ) 149 150 compute_environment_configuration.additional_properties = d 151 return compute_environment_configuration
@_attrs_define
class ComputeEnvironmentConfigurationInput:
    """
    Attributes:
        name (str):
        agent_id (None | str | Unset):
        properties (ComputeEnvironmentConfigurationInputProperties | None | Unset):
    """

    name: str
    agent_id: None | str | Unset = UNSET
    properties: ComputeEnvironmentConfigurationInputProperties | None | Unset = UNSET
    # Payload keys that are not modeled fields; populated by from_dict.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a camelCase dict; UNSET optional fields are omitted."""
        # Local import — presumably to avoid a circular import between model
        # modules; confirm against the generator's output conventions.
        from ..models.compute_environment_configuration_input_properties import (
            ComputeEnvironmentConfigurationInputProperties,
        )

        name = self.name

        # agent_id is three-state: UNSET (omit), None (explicit null), or a str.
        agent_id: None | str | Unset
        if isinstance(self.agent_id, Unset):
            agent_id = UNSET
        else:
            agent_id = self.agent_id

        # properties is three-state as well; a model instance serializes itself.
        properties: dict[str, Any] | None | Unset
        if isinstance(self.properties, Unset):
            properties = UNSET
        elif isinstance(self.properties, ComputeEnvironmentConfigurationInputProperties):
            properties = self.properties.to_dict()
        else:
            properties = self.properties

        field_dict: dict[str, Any] = {}
        # Extras first, so modeled fields overwrite clashing keys.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "name": name,
            }
        )
        if agent_id is not UNSET:
            field_dict["agentId"] = agent_id
        if properties is not UNSET:
            field_dict["properties"] = properties

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; unrecognized keys become additional properties."""
        from ..models.compute_environment_configuration_input_properties import (
            ComputeEnvironmentConfigurationInputProperties,
        )

        # Work on a copy so pops do not mutate the caller's mapping.
        d = dict(src_dict)
        name = d.pop("name")

        def _parse_agent_id(data: object) -> None | str | Unset:
            # Pass through explicit null / absent markers unchanged.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        agent_id = _parse_agent_id(d.pop("agentId", UNSET))

        def _parse_properties(data: object) -> ComputeEnvironmentConfigurationInputProperties | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                properties_type_0 = ComputeEnvironmentConfigurationInputProperties.from_dict(data)

                return properties_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            # Fallback: keep the raw value when it does not parse as a model.
            return cast(ComputeEnvironmentConfigurationInputProperties | None | Unset, data)

        properties = _parse_properties(d.pop("properties", UNSET))

        compute_environment_configuration_input = cls(
            name=name,
            agent_id=agent_id,
            properties=properties,
        )

        # Whatever was not popped above is retained as extra properties.
        compute_environment_configuration_input.additional_properties = d
        return compute_environment_configuration_input

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored additional properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- agent_id (None | str | Unset):
- properties (ComputeEnvironmentConfigurationInputProperties | None | Unset):
26def __init__(self, name, agent_id=attr_dict['agent_id'].default, properties=attr_dict['properties'].default): 27 self.name = name 28 self.agent_id = agent_id 29 self.properties = properties 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ComputeEnvironmentConfigurationInput.
35 def to_dict(self) -> dict[str, Any]: 36 from ..models.compute_environment_configuration_input_properties import ( 37 ComputeEnvironmentConfigurationInputProperties, 38 ) 39 40 name = self.name 41 42 agent_id: None | str | Unset 43 if isinstance(self.agent_id, Unset): 44 agent_id = UNSET 45 else: 46 agent_id = self.agent_id 47 48 properties: dict[str, Any] | None | Unset 49 if isinstance(self.properties, Unset): 50 properties = UNSET 51 elif isinstance(self.properties, ComputeEnvironmentConfigurationInputProperties): 52 properties = self.properties.to_dict() 53 else: 54 properties = self.properties 55 56 field_dict: dict[str, Any] = {} 57 field_dict.update(self.additional_properties) 58 field_dict.update( 59 { 60 "name": name, 61 } 62 ) 63 if agent_id is not UNSET: 64 field_dict["agentId"] = agent_id 65 if properties is not UNSET: 66 field_dict["properties"] = properties 67 68 return field_dict
70 @classmethod 71 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 72 from ..models.compute_environment_configuration_input_properties import ( 73 ComputeEnvironmentConfigurationInputProperties, 74 ) 75 76 d = dict(src_dict) 77 name = d.pop("name") 78 79 def _parse_agent_id(data: object) -> None | str | Unset: 80 if data is None: 81 return data 82 if isinstance(data, Unset): 83 return data 84 return cast(None | str | Unset, data) 85 86 agent_id = _parse_agent_id(d.pop("agentId", UNSET)) 87 88 def _parse_properties(data: object) -> ComputeEnvironmentConfigurationInputProperties | None | Unset: 89 if data is None: 90 return data 91 if isinstance(data, Unset): 92 return data 93 try: 94 if not isinstance(data, dict): 95 raise TypeError() 96 properties_type_0 = ComputeEnvironmentConfigurationInputProperties.from_dict(data) 97 98 return properties_type_0 99 except (TypeError, ValueError, AttributeError, KeyError): 100 pass 101 return cast(ComputeEnvironmentConfigurationInputProperties | None | Unset, data) 102 103 properties = _parse_properties(d.pop("properties", UNSET)) 104 105 compute_environment_configuration_input = cls( 106 name=name, 107 agent_id=agent_id, 108 properties=properties, 109 ) 110 111 compute_environment_configuration_input.additional_properties = d 112 return compute_environment_configuration_input
@_attrs_define
class ComputeEnvironmentConfigurationInputProperties:
    """String-to-string property map; every payload key is kept as-is."""

    # Open string map; filled by from_dict or item assignment.
    additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict (a shallow copy of the extra properties)."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a mapping; every key becomes an additional property."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored additional properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> str:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: str) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Method generated by attrs for class ComputeEnvironmentConfigurationInputProperties.
25 @classmethod 26 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 27 d = dict(src_dict) 28 compute_environment_configuration_input_properties = cls() 29 30 compute_environment_configuration_input_properties.additional_properties = d 31 return compute_environment_configuration_input_properties
@_attrs_define
class ComputeEnvironmentConfigurationProperties:
    """Configuration properties passed to the environment"""

    # Open string map; filled by from_dict or item assignment.
    additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict (a shallow copy of the extra properties)."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a mapping; every key becomes an additional property."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored additional properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> str:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: str) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Configuration properties passed to the environment
Method generated by attrs for class ComputeEnvironmentConfigurationProperties.
@_attrs_define
class Contact:
    """
    Attributes:
        name (str):
        organization (str):
        email (str):
        phone (str):
    """

    name: str
    organization: str
    email: str
    phone: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a dict; modeled fields overwrite clashing extras."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["name"] = self.name
        serialized["organization"] = self.organization
        serialized["email"] = self.email
        serialized["phone"] = self.phone
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a mapping; leftover keys become additional properties."""
        data = dict(src_dict)
        instance = cls(
            name=data.pop("name"),
            organization=data.pop("organization"),
            email=data.pop("email"),
            phone=data.pop("phone"),
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored additional properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- organization (str):
- email (str):
- phone (str):
27def __init__(self, name, organization, email, phone): 28 self.name = name 29 self.organization = organization 30 self.email = email 31 self.phone = phone 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Contact.
29 def to_dict(self) -> dict[str, Any]: 30 name = self.name 31 32 organization = self.organization 33 34 email = self.email 35 36 phone = self.phone 37 38 field_dict: dict[str, Any] = {} 39 field_dict.update(self.additional_properties) 40 field_dict.update( 41 { 42 "name": name, 43 "organization": organization, 44 "email": email, 45 "phone": phone, 46 } 47 ) 48 49 return field_dict
51 @classmethod 52 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 53 d = dict(src_dict) 54 name = d.pop("name") 55 56 organization = d.pop("organization") 57 58 email = d.pop("email") 59 60 phone = d.pop("phone") 61 62 contact = cls( 63 name=name, 64 organization=organization, 65 email=email, 66 phone=phone, 67 ) 68 69 contact.additional_properties = d 70 return contact
@_attrs_define
class ContactInput:
    """
    Attributes:
        title (str):
        description (str):
        name (str):
        phone (str):
        email (str):
    """

    title: str
    description: str
    name: str
    phone: str
    email: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a dict; modeled fields overwrite clashing extras."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["title"] = self.title
        serialized["description"] = self.description
        serialized["name"] = self.name
        serialized["phone"] = self.phone
        serialized["email"] = self.email
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a mapping; leftover keys become additional properties."""
        data = dict(src_dict)
        instance = cls(
            title=data.pop("title"),
            description=data.pop("description"),
            name=data.pop("name"),
            phone=data.pop("phone"),
            email=data.pop("email"),
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored additional properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- title (str):
- description (str):
- name (str):
- phone (str):
- email (str):
28def __init__(self, title, description, name, phone, email): 29 self.title = title 30 self.description = description 31 self.name = name 32 self.phone = phone 33 self.email = email 34 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ContactInput.
31 def to_dict(self) -> dict[str, Any]: 32 title = self.title 33 34 description = self.description 35 36 name = self.name 37 38 phone = self.phone 39 40 email = self.email 41 42 field_dict: dict[str, Any] = {} 43 field_dict.update(self.additional_properties) 44 field_dict.update( 45 { 46 "title": title, 47 "description": description, 48 "name": name, 49 "phone": phone, 50 "email": email, 51 } 52 ) 53 54 return field_dict
56 @classmethod 57 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 58 d = dict(src_dict) 59 title = d.pop("title") 60 61 description = d.pop("description") 62 63 name = d.pop("name") 64 65 phone = d.pop("phone") 66 67 email = d.pop("email") 68 69 contact_input = cls( 70 title=title, 71 description=description, 72 name=name, 73 phone=phone, 74 email=email, 75 ) 76 77 contact_input.additional_properties = d 78 return contact_input
@_attrs_define
class CostResponse:
    """
    Attributes:
        total_cost (float | Unset): Total cost
        groups (list[GroupCost] | Unset): Costs grouped by the task status
        tasks (list[TaskCost] | Unset): Costs for each workflow task
        is_estimate (bool | Unset): Whether this is an estimated cost
    """

    total_cost: float | Unset = UNSET
    groups: list[GroupCost] | Unset = UNSET
    tasks: list[TaskCost] | Unset = UNSET
    is_estimate: bool | Unset = UNSET
    # Payload keys that are not modeled fields; populated by from_dict.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a camelCase dict; UNSET optional fields are omitted."""
        total_cost = self.total_cost

        # Nested model lists serialize element-by-element, only when set.
        groups: list[dict[str, Any]] | Unset = UNSET
        if not isinstance(self.groups, Unset):
            groups = []
            for groups_item_data in self.groups:
                groups_item = groups_item_data.to_dict()
                groups.append(groups_item)

        tasks: list[dict[str, Any]] | Unset = UNSET
        if not isinstance(self.tasks, Unset):
            tasks = []
            for tasks_item_data in self.tasks:
                tasks_item = tasks_item_data.to_dict()
                tasks.append(tasks_item)

        is_estimate = self.is_estimate

        field_dict: dict[str, Any] = {}
        # Extras first, so modeled fields overwrite clashing keys.
        field_dict.update(self.additional_properties)
        field_dict.update({})
        if total_cost is not UNSET:
            field_dict["totalCost"] = total_cost
        if groups is not UNSET:
            field_dict["groups"] = groups
        if tasks is not UNSET:
            field_dict["tasks"] = tasks
        if is_estimate is not UNSET:
            field_dict["isEstimate"] = is_estimate

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; unrecognized keys become additional properties."""
        # Local imports — presumably to avoid circular imports between model
        # modules; confirm against the generator's output conventions.
        from ..models.group_cost import GroupCost
        from ..models.task_cost import TaskCost

        # Work on a copy so pops do not mutate the caller's mapping.
        d = dict(src_dict)
        total_cost = d.pop("totalCost", UNSET)

        _groups = d.pop("groups", UNSET)
        groups: list[GroupCost] | Unset = UNSET
        if _groups is not UNSET:
            groups = []
            for groups_item_data in _groups:
                groups_item = GroupCost.from_dict(groups_item_data)

                groups.append(groups_item)

        _tasks = d.pop("tasks", UNSET)
        tasks: list[TaskCost] | Unset = UNSET
        if _tasks is not UNSET:
            tasks = []
            for tasks_item_data in _tasks:
                tasks_item = TaskCost.from_dict(tasks_item_data)

                tasks.append(tasks_item)

        is_estimate = d.pop("isEstimate", UNSET)

        cost_response = cls(
            total_cost=total_cost,
            groups=groups,
            tasks=tasks,
            is_estimate=is_estimate,
        )

        # Whatever was not popped above is retained as extra properties.
        cost_response.additional_properties = d
        return cost_response

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored additional properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- total_cost (float | Unset): Total cost
- groups (list[GroupCost] | Unset): Costs grouped by the task status
- tasks (list[TaskCost] | Unset): Costs for each workflow task
- is_estimate (bool | Unset): Whether this is an estimated cost
27def __init__(self, total_cost=attr_dict['total_cost'].default, groups=attr_dict['groups'].default, tasks=attr_dict['tasks'].default, is_estimate=attr_dict['is_estimate'].default): 28 self.total_cost = total_cost 29 self.groups = groups 30 self.tasks = tasks 31 self.is_estimate = is_estimate 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class CostResponse.
36 def to_dict(self) -> dict[str, Any]: 37 total_cost = self.total_cost 38 39 groups: list[dict[str, Any]] | Unset = UNSET 40 if not isinstance(self.groups, Unset): 41 groups = [] 42 for groups_item_data in self.groups: 43 groups_item = groups_item_data.to_dict() 44 groups.append(groups_item) 45 46 tasks: list[dict[str, Any]] | Unset = UNSET 47 if not isinstance(self.tasks, Unset): 48 tasks = [] 49 for tasks_item_data in self.tasks: 50 tasks_item = tasks_item_data.to_dict() 51 tasks.append(tasks_item) 52 53 is_estimate = self.is_estimate 54 55 field_dict: dict[str, Any] = {} 56 field_dict.update(self.additional_properties) 57 field_dict.update({}) 58 if total_cost is not UNSET: 59 field_dict["totalCost"] = total_cost 60 if groups is not UNSET: 61 field_dict["groups"] = groups 62 if tasks is not UNSET: 63 field_dict["tasks"] = tasks 64 if is_estimate is not UNSET: 65 field_dict["isEstimate"] = is_estimate 66 67 return field_dict
69 @classmethod 70 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 71 from ..models.group_cost import GroupCost 72 from ..models.task_cost import TaskCost 73 74 d = dict(src_dict) 75 total_cost = d.pop("totalCost", UNSET) 76 77 _groups = d.pop("groups", UNSET) 78 groups: list[GroupCost] | Unset = UNSET 79 if _groups is not UNSET: 80 groups = [] 81 for groups_item_data in _groups: 82 groups_item = GroupCost.from_dict(groups_item_data) 83 84 groups.append(groups_item) 85 86 _tasks = d.pop("tasks", UNSET) 87 tasks: list[TaskCost] | Unset = UNSET 88 if _tasks is not UNSET: 89 tasks = [] 90 for tasks_item_data in _tasks: 91 tasks_item = TaskCost.from_dict(tasks_item_data) 92 93 tasks.append(tasks_item) 94 95 is_estimate = d.pop("isEstimate", UNSET) 96 97 cost_response = cls( 98 total_cost=total_cost, 99 groups=groups, 100 tasks=tasks, 101 is_estimate=is_estimate, 102 ) 103 104 cost_response.additional_properties = d 105 return cost_response
@_attrs_define
class CreateNotebookInstanceRequest:
    """
    Attributes:
        name (str):
        instance_type (str): AWS EC2 Instance Type (see list of available options) Example: ml.t3.medium.
        accelerator_types (list[str]):
        volume_size_gb (int):
        git_repositories (list[str] | None | Unset): List of public git repositories to clone into the notebook
            instance.
        is_shared_with_project (bool | Unset): Whether the notebook is shared with the project Default: False.
    """

    name: str
    instance_type: str
    accelerator_types: list[str]
    volume_size_gb: int
    git_repositories: list[str] | None | Unset = UNSET
    is_shared_with_project: bool | Unset = UNSET if False else False
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-compatible dict using the API's camelCase keys.

        Required fields are always emitted; optional fields are omitted
        when still UNSET.
        """
        git_repositories: list[str] | None | Unset
        if isinstance(self.git_repositories, Unset):
            git_repositories = UNSET
        else:
            # Both the list form and the explicit-null form serialize as-is.
            git_repositories = self.git_repositories

        field_dict: dict[str, Any] = dict(self.additional_properties)
        field_dict["name"] = self.name
        field_dict["instanceType"] = self.instance_type
        field_dict["acceleratorTypes"] = self.accelerator_types
        field_dict["volumeSizeGB"] = self.volume_size_gb
        if git_repositories is not UNSET:
            field_dict["gitRepositories"] = git_repositories
        if self.is_shared_with_project is not UNSET:
            field_dict["isSharedWithProject"] = self.is_shared_with_project

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-compatible dict; unrecognized keys
        are retained in ``additional_properties``."""
        d = dict(src_dict)
        name = d.pop("name")
        instance_type = d.pop("instanceType")
        accelerator_types = cast(list[str], d.pop("acceleratorTypes"))
        volume_size_gb = d.pop("volumeSizeGB")

        def _parse_git_repositories(data: object) -> list[str] | None | Unset:
            # None and UNSET pass through untouched; a list is accepted verbatim.
            if data is None or isinstance(data, Unset):
                return data
            if isinstance(data, list):
                return cast(list[str], data)
            return cast(list[str] | None | Unset, data)

        git_repositories = _parse_git_repositories(d.pop("gitRepositories", UNSET))
        is_shared_with_project = d.pop("isSharedWithProject", UNSET)

        request = cls(
            name=name,
            instance_type=instance_type,
            accelerator_types=accelerator_types,
            volume_size_gb=volume_size_gb,
            git_repositories=git_repositories,
            is_shared_with_project=is_shared_with_project,
        )
        request.additional_properties = d
        return request

    @property
    def additional_keys(self) -> list[str]:
        """Keys present in the payload beyond the declared attributes."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- instance_type (str): AWS EC2 Instance Type (see list of available options) Example: ml.t3.medium.
- accelerator_types (list[str]):
- volume_size_gb (int):
- git_repositories (list[str] | None | Unset): List of public git repositories to clone into the notebook instance.
- is_shared_with_project (bool | Unset): Whether the notebook is shared with the project Default: False.
29def __init__(self, name, instance_type, accelerator_types, volume_size_gb, git_repositories=attr_dict['git_repositories'].default, is_shared_with_project=attr_dict['is_shared_with_project'].default): 30 self.name = name 31 self.instance_type = instance_type 32 self.accelerator_types = accelerator_types 33 self.volume_size_gb = volume_size_gb 34 self.git_repositories = git_repositories 35 self.is_shared_with_project = is_shared_with_project 36 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class CreateNotebookInstanceRequest.
36 def to_dict(self) -> dict[str, Any]: 37 name = self.name 38 39 instance_type = self.instance_type 40 41 accelerator_types = self.accelerator_types 42 43 volume_size_gb = self.volume_size_gb 44 45 git_repositories: list[str] | None | Unset 46 if isinstance(self.git_repositories, Unset): 47 git_repositories = UNSET 48 elif isinstance(self.git_repositories, list): 49 git_repositories = self.git_repositories 50 51 else: 52 git_repositories = self.git_repositories 53 54 is_shared_with_project = self.is_shared_with_project 55 56 field_dict: dict[str, Any] = {} 57 field_dict.update(self.additional_properties) 58 field_dict.update( 59 { 60 "name": name, 61 "instanceType": instance_type, 62 "acceleratorTypes": accelerator_types, 63 "volumeSizeGB": volume_size_gb, 64 } 65 ) 66 if git_repositories is not UNSET: 67 field_dict["gitRepositories"] = git_repositories 68 if is_shared_with_project is not UNSET: 69 field_dict["isSharedWithProject"] = is_shared_with_project 70 71 return field_dict
73 @classmethod 74 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 75 d = dict(src_dict) 76 name = d.pop("name") 77 78 instance_type = d.pop("instanceType") 79 80 accelerator_types = cast(list[str], d.pop("acceleratorTypes")) 81 82 volume_size_gb = d.pop("volumeSizeGB") 83 84 def _parse_git_repositories(data: object) -> list[str] | None | Unset: 85 if data is None: 86 return data 87 if isinstance(data, Unset): 88 return data 89 try: 90 if not isinstance(data, list): 91 raise TypeError() 92 git_repositories_type_0 = cast(list[str], data) 93 94 return git_repositories_type_0 95 except (TypeError, ValueError, AttributeError, KeyError): 96 pass 97 return cast(list[str] | None | Unset, data) 98 99 git_repositories = _parse_git_repositories(d.pop("gitRepositories", UNSET)) 100 101 is_shared_with_project = d.pop("isSharedWithProject", UNSET) 102 103 create_notebook_instance_request = cls( 104 name=name, 105 instance_type=instance_type, 106 accelerator_types=accelerator_types, 107 volume_size_gb=volume_size_gb, 108 git_repositories=git_repositories, 109 is_shared_with_project=is_shared_with_project, 110 ) 111 112 create_notebook_instance_request.additional_properties = d 113 return create_notebook_instance_request
15@_attrs_define 16class CreateProjectAccessRequest: 17 """ 18 Attributes: 19 role (ProjectRole): 20 message (str): 21 """ 22 23 role: ProjectRole 24 message: str 25 additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) 26 27 def to_dict(self) -> dict[str, Any]: 28 role = self.role.value 29 30 message = self.message 31 32 field_dict: dict[str, Any] = {} 33 field_dict.update(self.additional_properties) 34 field_dict.update( 35 { 36 "role": role, 37 "message": message, 38 } 39 ) 40 41 return field_dict 42 43 @classmethod 44 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 45 d = dict(src_dict) 46 role = ProjectRole(d.pop("role")) 47 48 message = d.pop("message") 49 50 create_project_access_request = cls( 51 role=role, 52 message=message, 53 ) 54 55 create_project_access_request.additional_properties = d 56 return create_project_access_request 57 58 @property 59 def additional_keys(self) -> list[str]: 60 return list(self.additional_properties.keys()) 61 62 def __getitem__(self, key: str) -> Any: 63 return self.additional_properties[key] 64 65 def __setitem__(self, key: str, value: Any) -> None: 66 self.additional_properties[key] = value 67 68 def __delitem__(self, key: str) -> None: 69 del self.additional_properties[key] 70 71 def __contains__(self, key: str) -> bool: 72 return key in self.additional_properties
Attributes:
- role (ProjectRole):
- message (str):
25def __init__(self, role, message): 26 self.role = role 27 self.message = message 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class CreateProjectAccessRequest.
43 @classmethod 44 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 45 d = dict(src_dict) 46 role = ProjectRole(d.pop("role")) 47 48 message = d.pop("message") 49 50 create_project_access_request = cls( 51 role=role, 52 message=message, 53 ) 54 55 create_project_access_request.additional_properties = d 56 return create_project_access_request
13@_attrs_define 14class CreateReferenceRequest: 15 """ 16 Attributes: 17 name (str): 18 description (str): 19 type_ (str): 20 expected_files (list[str]): 21 """ 22 23 name: str 24 description: str 25 type_: str 26 expected_files: list[str] 27 additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) 28 29 def to_dict(self) -> dict[str, Any]: 30 name = self.name 31 32 description = self.description 33 34 type_ = self.type_ 35 36 expected_files = self.expected_files 37 38 field_dict: dict[str, Any] = {} 39 field_dict.update(self.additional_properties) 40 field_dict.update( 41 { 42 "name": name, 43 "description": description, 44 "type": type_, 45 "expectedFiles": expected_files, 46 } 47 ) 48 49 return field_dict 50 51 @classmethod 52 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 53 d = dict(src_dict) 54 name = d.pop("name") 55 56 description = d.pop("description") 57 58 type_ = d.pop("type") 59 60 expected_files = cast(list[str], d.pop("expectedFiles")) 61 62 create_reference_request = cls( 63 name=name, 64 description=description, 65 type_=type_, 66 expected_files=expected_files, 67 ) 68 69 create_reference_request.additional_properties = d 70 return create_reference_request 71 72 @property 73 def additional_keys(self) -> list[str]: 74 return list(self.additional_properties.keys()) 75 76 def __getitem__(self, key: str) -> Any: 77 return self.additional_properties[key] 78 79 def __setitem__(self, key: str, value: Any) -> None: 80 self.additional_properties[key] = value 81 82 def __delitem__(self, key: str) -> None: 83 del self.additional_properties[key] 84 85 def __contains__(self, key: str) -> bool: 86 return key in self.additional_properties
Attributes:
- name (str):
- description (str):
- type_ (str):
- expected_files (list[str]):
27def __init__(self, name, description, type_, expected_files): 28 self.name = name 29 self.description = description 30 self.type_ = type_ 31 self.expected_files = expected_files 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class CreateReferenceRequest.
29 def to_dict(self) -> dict[str, Any]: 30 name = self.name 31 32 description = self.description 33 34 type_ = self.type_ 35 36 expected_files = self.expected_files 37 38 field_dict: dict[str, Any] = {} 39 field_dict.update(self.additional_properties) 40 field_dict.update( 41 { 42 "name": name, 43 "description": description, 44 "type": type_, 45 "expectedFiles": expected_files, 46 } 47 ) 48 49 return field_dict
51 @classmethod 52 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 53 d = dict(src_dict) 54 name = d.pop("name") 55 56 description = d.pop("description") 57 58 type_ = d.pop("type") 59 60 expected_files = cast(list[str], d.pop("expectedFiles")) 61 62 create_reference_request = cls( 63 name=name, 64 description=description, 65 type_=type_, 66 expected_files=expected_files, 67 ) 68 69 create_reference_request.additional_properties = d 70 return create_reference_request
13@_attrs_define 14class CreateResponse: 15 """ 16 Attributes: 17 id (str): 18 message (str): 19 """ 20 21 id: str 22 message: str 23 additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) 24 25 def to_dict(self) -> dict[str, Any]: 26 id = self.id 27 28 message = self.message 29 30 field_dict: dict[str, Any] = {} 31 field_dict.update(self.additional_properties) 32 field_dict.update( 33 { 34 "id": id, 35 "message": message, 36 } 37 ) 38 39 return field_dict 40 41 @classmethod 42 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 43 d = dict(src_dict) 44 id = d.pop("id") 45 46 message = d.pop("message") 47 48 create_response = cls( 49 id=id, 50 message=message, 51 ) 52 53 create_response.additional_properties = d 54 return create_response 55 56 @property 57 def additional_keys(self) -> list[str]: 58 return list(self.additional_properties.keys()) 59 60 def __getitem__(self, key: str) -> Any: 61 return self.additional_properties[key] 62 63 def __setitem__(self, key: str, value: Any) -> None: 64 self.additional_properties[key] = value 65 66 def __delitem__(self, key: str) -> None: 67 del self.additional_properties[key] 68 69 def __contains__(self, key: str) -> bool: 70 return key in self.additional_properties
Attributes:
- id (str):
- message (str):
25def __init__(self, id, message): 26 self.id = id 27 self.message = message 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class CreateResponse.
5class CustomerType(str, Enum): 6 CONSORTIUM = "CONSORTIUM" 7 EXTERNAL = "EXTERNAL" 8 INTERNAL = "INTERNAL" 9 UNKNOWN = "UNKNOWN" 10 """ This is a fallback value for when the value is not known, do not use this value when making requests """ 11 12 def __str__(self) -> str: 13 return str(self.value) 14 15 @classmethod 16 def _missing_(cls, number): 17 return cls(cls.UNKNOWN)
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known; do not use this value when making requests.
@_attrs_define
class CustomPipelineSettings:
    """Used to describe the location of the process definition dependencies

    Attributes:
        repository (str): GitHub repository that contains the process definition Example: CirroBio/my-pipeline.
        branch (str | Unset): Branch, tag, or commit hash of the repo that contains the process definition Default:
            'main'.
        folder (str | Unset): Folder within the repo that contains the process definition Default: '.cirro'.
        repository_type (None | RepositoryType | Unset):
        last_sync (datetime.datetime | None | Unset): Time of last sync
        sync_status (None | SyncStatus | Unset):
        commit_hash (None | str | Unset): Commit hash of the last successful sync
        is_authorized (bool | Unset): Whether we are authorized to access the repository Default: False.
    """

    repository: str
    branch: str | Unset = "main"
    folder: str | Unset = ".cirro"
    repository_type: None | RepositoryType | Unset = UNSET
    last_sync: datetime.datetime | None | Unset = UNSET
    sync_status: None | SyncStatus | Unset = UNSET
    commit_hash: None | str | Unset = UNSET
    is_authorized: bool | Unset = False
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-compatible dict using the API's camelCase keys.

        Enums are emitted by value, datetimes as ISO-8601 strings, and explicit
        ``None`` values are preserved; UNSET attributes are omitted.
        """
        repository_type: None | str | Unset
        if isinstance(self.repository_type, Unset):
            repository_type = UNSET
        else:
            repository_type = (
                self.repository_type.value
                if isinstance(self.repository_type, RepositoryType)
                else self.repository_type
            )

        last_sync: None | str | Unset
        if isinstance(self.last_sync, Unset):
            last_sync = UNSET
        else:
            last_sync = (
                self.last_sync.isoformat()
                if isinstance(self.last_sync, datetime.datetime)
                else self.last_sync
            )

        sync_status: None | str | Unset
        if isinstance(self.sync_status, Unset):
            sync_status = UNSET
        else:
            sync_status = (
                self.sync_status.value
                if isinstance(self.sync_status, SyncStatus)
                else self.sync_status
            )

        commit_hash: None | str | Unset
        commit_hash = UNSET if isinstance(self.commit_hash, Unset) else self.commit_hash

        field_dict: dict[str, Any] = dict(self.additional_properties)
        field_dict["repository"] = self.repository
        for key, value in (
            ("branch", self.branch),
            ("folder", self.folder),
            ("repositoryType", repository_type),
            ("lastSync", last_sync),
            ("syncStatus", sync_status),
            ("commitHash", commit_hash),
            ("isAuthorized", self.is_authorized),
        ):
            if value is not UNSET:
                field_dict[key] = value

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-compatible dict; unrecognized keys
        are retained in ``additional_properties``.

        Values that fail to parse as their declared enum/datetime type are
        passed through unchanged rather than raising.
        """
        d = dict(src_dict)
        repository = d.pop("repository")
        branch = d.pop("branch", UNSET)
        folder = d.pop("folder", UNSET)

        def _parse_repository_type(data: object) -> None | RepositoryType | Unset:
            if data is None or isinstance(data, Unset):
                return data
            if isinstance(data, str):
                try:
                    return RepositoryType(data)
                except (TypeError, ValueError, AttributeError, KeyError):
                    pass
            return cast(None | RepositoryType | Unset, data)

        repository_type = _parse_repository_type(d.pop("repositoryType", UNSET))

        def _parse_last_sync(data: object) -> datetime.datetime | None | Unset:
            if data is None or isinstance(data, Unset):
                return data
            if isinstance(data, str):
                try:
                    return isoparse(data)
                except (TypeError, ValueError, AttributeError, KeyError):
                    pass
            return cast(datetime.datetime | None | Unset, data)

        last_sync = _parse_last_sync(d.pop("lastSync", UNSET))

        def _parse_sync_status(data: object) -> None | SyncStatus | Unset:
            if data is None or isinstance(data, Unset):
                return data
            if isinstance(data, str):
                try:
                    return SyncStatus(data)
                except (TypeError, ValueError, AttributeError, KeyError):
                    pass
            return cast(None | SyncStatus | Unset, data)

        sync_status = _parse_sync_status(d.pop("syncStatus", UNSET))

        def _parse_commit_hash(data: object) -> None | str | Unset:
            # No conversion needed; None/UNSET/str all pass through.
            if data is None or isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        commit_hash = _parse_commit_hash(d.pop("commitHash", UNSET))
        is_authorized = d.pop("isAuthorized", UNSET)

        settings = cls(
            repository=repository,
            branch=branch,
            folder=folder,
            repository_type=repository_type,
            last_sync=last_sync,
            sync_status=sync_status,
            commit_hash=commit_hash,
            is_authorized=is_authorized,
        )
        settings.additional_properties = d
        return settings

    @property
    def additional_keys(self) -> list[str]:
        """Keys present in the payload beyond the declared attributes."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Used to describe the location of the process definition dependencies
Attributes:
- repository (str): GitHub repository that contains the process definition Example: CirroBio/my-pipeline.
- branch (str | Unset): Branch, tag, or commit hash of the repo that contains the process definition Default: 'main'.
- folder (str | Unset): Folder within the repo that contains the process definition Default: '.cirro'.
- repository_type (None | RepositoryType | Unset):
- last_sync (datetime.datetime | None | Unset): Time of last sync
- sync_status (None | SyncStatus | Unset):
- commit_hash (None | str | Unset): Commit hash of the last successful sync
- is_authorized (bool | Unset): Whether we are authorized to access the repository Default: False.
31def __init__(self, repository, branch=attr_dict['branch'].default, folder=attr_dict['folder'].default, repository_type=attr_dict['repository_type'].default, last_sync=attr_dict['last_sync'].default, sync_status=attr_dict['sync_status'].default, commit_hash=attr_dict['commit_hash'].default, is_authorized=attr_dict['is_authorized'].default): 32 self.repository = repository 33 self.branch = branch 34 self.folder = folder 35 self.repository_type = repository_type 36 self.last_sync = last_sync 37 self.sync_status = sync_status 38 self.commit_hash = commit_hash 39 self.is_authorized = is_authorized 40 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class CustomPipelineSettings.
45 def to_dict(self) -> dict[str, Any]: 46 repository = self.repository 47 48 branch = self.branch 49 50 folder = self.folder 51 52 repository_type: None | str | Unset 53 if isinstance(self.repository_type, Unset): 54 repository_type = UNSET 55 elif isinstance(self.repository_type, RepositoryType): 56 repository_type = self.repository_type.value 57 else: 58 repository_type = self.repository_type 59 60 last_sync: None | str | Unset 61 if isinstance(self.last_sync, Unset): 62 last_sync = UNSET 63 elif isinstance(self.last_sync, datetime.datetime): 64 last_sync = self.last_sync.isoformat() 65 else: 66 last_sync = self.last_sync 67 68 sync_status: None | str | Unset 69 if isinstance(self.sync_status, Unset): 70 sync_status = UNSET 71 elif isinstance(self.sync_status, SyncStatus): 72 sync_status = self.sync_status.value 73 else: 74 sync_status = self.sync_status 75 76 commit_hash: None | str | Unset 77 if isinstance(self.commit_hash, Unset): 78 commit_hash = UNSET 79 else: 80 commit_hash = self.commit_hash 81 82 is_authorized = self.is_authorized 83 84 field_dict: dict[str, Any] = {} 85 field_dict.update(self.additional_properties) 86 field_dict.update( 87 { 88 "repository": repository, 89 } 90 ) 91 if branch is not UNSET: 92 field_dict["branch"] = branch 93 if folder is not UNSET: 94 field_dict["folder"] = folder 95 if repository_type is not UNSET: 96 field_dict["repositoryType"] = repository_type 97 if last_sync is not UNSET: 98 field_dict["lastSync"] = last_sync 99 if sync_status is not UNSET: 100 field_dict["syncStatus"] = sync_status 101 if commit_hash is not UNSET: 102 field_dict["commitHash"] = commit_hash 103 if is_authorized is not UNSET: 104 field_dict["isAuthorized"] = is_authorized 105 106 return field_dict
108 @classmethod 109 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 110 d = dict(src_dict) 111 repository = d.pop("repository") 112 113 branch = d.pop("branch", UNSET) 114 115 folder = d.pop("folder", UNSET) 116 117 def _parse_repository_type(data: object) -> None | RepositoryType | Unset: 118 if data is None: 119 return data 120 if isinstance(data, Unset): 121 return data 122 try: 123 if not isinstance(data, str): 124 raise TypeError() 125 repository_type_type_1 = RepositoryType(data) 126 127 return repository_type_type_1 128 except (TypeError, ValueError, AttributeError, KeyError): 129 pass 130 return cast(None | RepositoryType | Unset, data) 131 132 repository_type = _parse_repository_type(d.pop("repositoryType", UNSET)) 133 134 def _parse_last_sync(data: object) -> datetime.datetime | None | Unset: 135 if data is None: 136 return data 137 if isinstance(data, Unset): 138 return data 139 try: 140 if not isinstance(data, str): 141 raise TypeError() 142 last_sync_type_0 = isoparse(data) 143 144 return last_sync_type_0 145 except (TypeError, ValueError, AttributeError, KeyError): 146 pass 147 return cast(datetime.datetime | None | Unset, data) 148 149 last_sync = _parse_last_sync(d.pop("lastSync", UNSET)) 150 151 def _parse_sync_status(data: object) -> None | SyncStatus | Unset: 152 if data is None: 153 return data 154 if isinstance(data, Unset): 155 return data 156 try: 157 if not isinstance(data, str): 158 raise TypeError() 159 sync_status_type_1 = SyncStatus(data) 160 161 return sync_status_type_1 162 except (TypeError, ValueError, AttributeError, KeyError): 163 pass 164 return cast(None | SyncStatus | Unset, data) 165 166 sync_status = _parse_sync_status(d.pop("syncStatus", UNSET)) 167 168 def _parse_commit_hash(data: object) -> None | str | Unset: 169 if data is None: 170 return data 171 if isinstance(data, Unset): 172 return data 173 return cast(None | str | Unset, data) 174 175 commit_hash = _parse_commit_hash(d.pop("commitHash", UNSET)) 176 
177 is_authorized = d.pop("isAuthorized", UNSET) 178 179 custom_pipeline_settings = cls( 180 repository=repository, 181 branch=branch, 182 folder=folder, 183 repository_type=repository_type, 184 last_sync=last_sync, 185 sync_status=sync_status, 186 commit_hash=commit_hash, 187 is_authorized=is_authorized, 188 ) 189 190 custom_pipeline_settings.additional_properties = d 191 return custom_pipeline_settings
@_attrs_define
class CustomProcessInput:
    """
    Attributes:
        id (str): Unique ID of the Process Example: process-hutch-magic_flute-1_0.
        name (str): Friendly name for the process Example: MAGeCK Flute.
        description (str): Description of the process Example: MAGeCK Flute enables accurate identification of
            essential genes with their related biological functions.
        executor (Executor): How the workflow is executed
        child_process_ids (list[str]): IDs of pipelines that can be run downstream
        parent_process_ids (list[str]): IDs of processes that can run this pipeline
        linked_project_ids (list[str]): Projects that can run this process
        data_type (None | str | Unset): Name of the data type this pipeline produces (if it is not defined, use the
            name)
        category (str | Unset): Category of the process Example: Microbial Analysis.
        documentation_url (None | str | Unset): Link to process documentation Example:
            https://docs.cirro.bio/pipelines/catalog_targeted_sequencing/#crispr-screen-analysis.
        file_requirements_message (None | str | Unset): Description of the files to be uploaded (optional)
        pipeline_code (None | PipelineCode | Unset):
        is_tenant_wide (bool | Unset): Whether the process is shared with the tenant
        allow_multiple_sources (bool | Unset): Whether the pipeline is allowed to have multiple dataset sources
        uses_sample_sheet (bool | Unset): Whether the pipeline uses the Cirro-provided sample sheet
        custom_settings (CustomPipelineSettings | None | Unset):
        file_mapping_rules (list[FileMappingRule] | None | Unset):
    """

    id: str
    name: str
    description: str
    executor: Executor
    child_process_ids: list[str]
    parent_process_ids: list[str]
    linked_project_ids: list[str]
    data_type: None | str | Unset = UNSET
    category: str | Unset = UNSET
    documentation_url: None | str | Unset = UNSET
    file_requirements_message: None | str | Unset = UNSET
    pipeline_code: None | PipelineCode | Unset = UNSET
    is_tenant_wide: bool | Unset = UNSET
    allow_multiple_sources: bool | Unset = UNSET
    uses_sample_sheet: bool | Unset = UNSET
    custom_settings: CustomPipelineSettings | None | Unset = UNSET
    file_mapping_rules: list[FileMappingRule] | None | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize this model to a JSON-style dict; optional fields left UNSET are omitted."""
        # Local imports mirror the module's lazy-import convention (avoids cycles).
        from ..models.custom_pipeline_settings import CustomPipelineSettings
        from ..models.pipeline_code import PipelineCode

        # Nullable scalars: normalize any Unset marker to the UNSET singleton.
        data_type = UNSET if isinstance(self.data_type, Unset) else self.data_type
        documentation_url = UNSET if isinstance(self.documentation_url, Unset) else self.documentation_url
        file_requirements_message = (
            UNSET if isinstance(self.file_requirements_message, Unset) else self.file_requirements_message
        )

        # Nested models serialize through their own to_dict; None/UNSET pass through unchanged.
        pipeline_code: dict[str, Any] | None | Unset
        if isinstance(self.pipeline_code, Unset):
            pipeline_code = UNSET
        elif isinstance(self.pipeline_code, PipelineCode):
            pipeline_code = self.pipeline_code.to_dict()
        else:
            pipeline_code = self.pipeline_code

        custom_settings: dict[str, Any] | None | Unset
        if isinstance(self.custom_settings, Unset):
            custom_settings = UNSET
        elif isinstance(self.custom_settings, CustomPipelineSettings):
            custom_settings = self.custom_settings.to_dict()
        else:
            custom_settings = self.custom_settings

        file_mapping_rules: list[dict[str, Any]] | None | Unset
        if isinstance(self.file_mapping_rules, Unset):
            file_mapping_rules = UNSET
        elif isinstance(self.file_mapping_rules, list):
            file_mapping_rules = [rule.to_dict() for rule in self.file_mapping_rules]
        else:
            file_mapping_rules = self.file_mapping_rules

        # Undeclared keys go in first so the declared fields win on any collision.
        field_dict: dict[str, Any] = dict(self.additional_properties)
        field_dict.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "executor": self.executor.value,
                "childProcessIds": self.child_process_ids,
                "parentProcessIds": self.parent_process_ids,
                "linkedProjectIds": self.linked_project_ids,
            }
        )
        # Optional fields, in wire order; only emitted when actually set.
        optional_fields = {
            "dataType": data_type,
            "category": self.category,
            "documentationUrl": documentation_url,
            "fileRequirementsMessage": file_requirements_message,
            "pipelineCode": pipeline_code,
            "isTenantWide": self.is_tenant_wide,
            "allowMultipleSources": self.allow_multiple_sources,
            "usesSampleSheet": self.uses_sample_sheet,
            "customSettings": custom_settings,
            "fileMappingRules": file_mapping_rules,
        }
        for wire_key, value in optional_fields.items():
            if value is not UNSET:
                field_dict[wire_key] = value

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-style dict; leftover keys become additional_properties."""
        from ..models.custom_pipeline_settings import CustomPipelineSettings
        from ..models.file_mapping_rule import FileMappingRule
        from ..models.pipeline_code import PipelineCode

        d = dict(src_dict)

        def _nullable_str(value: object) -> None | str | Unset:
            # None and Unset pass through untouched; anything else is kept as-is.
            if value is None or isinstance(value, Unset):
                return value
            return cast(None | str | Unset, value)

        def _parse_pipeline_code(value: object) -> None | PipelineCode | Unset:
            if value is None or isinstance(value, Unset):
                return value
            if isinstance(value, dict):
                try:
                    return PipelineCode.from_dict(value)
                except (TypeError, ValueError, AttributeError, KeyError):
                    pass  # fall through and keep the raw value
            return cast(None | PipelineCode | Unset, value)

        def _parse_custom_settings(value: object) -> CustomPipelineSettings | None | Unset:
            if value is None or isinstance(value, Unset):
                return value
            if isinstance(value, dict):
                try:
                    return CustomPipelineSettings.from_dict(value)
                except (TypeError, ValueError, AttributeError, KeyError):
                    pass
            return cast(CustomPipelineSettings | None | Unset, value)

        def _parse_file_mapping_rules(value: object) -> list[FileMappingRule] | None | Unset:
            if value is None or isinstance(value, Unset):
                return value
            if isinstance(value, list):
                try:
                    return [FileMappingRule.from_dict(item) for item in value]
                except (TypeError, ValueError, AttributeError, KeyError):
                    pass
            return cast(list[FileMappingRule] | None | Unset, value)

        custom_process_input = cls(
            id=d.pop("id"),
            name=d.pop("name"),
            description=d.pop("description"),
            executor=Executor(d.pop("executor")),
            child_process_ids=cast(list[str], d.pop("childProcessIds")),
            parent_process_ids=cast(list[str], d.pop("parentProcessIds")),
            linked_project_ids=cast(list[str], d.pop("linkedProjectIds")),
            data_type=_nullable_str(d.pop("dataType", UNSET)),
            category=d.pop("category", UNSET),
            documentation_url=_nullable_str(d.pop("documentationUrl", UNSET)),
            file_requirements_message=_nullable_str(d.pop("fileRequirementsMessage", UNSET)),
            pipeline_code=_parse_pipeline_code(d.pop("pipelineCode", UNSET)),
            is_tenant_wide=d.pop("isTenantWide", UNSET),
            allow_multiple_sources=d.pop("allowMultipleSources", UNSET),
            uses_sample_sheet=d.pop("usesSampleSheet", UNSET),
            custom_settings=_parse_custom_settings(d.pop("customSettings", UNSET)),
            file_mapping_rules=_parse_file_mapping_rules(d.pop("fileMappingRules", UNSET)),
        )

        # Whatever keys remain were not modeled; keep them so the object round-trips.
        custom_process_input.additional_properties = d
        return custom_process_input

    @property
    def additional_keys(self) -> list[str]:
        """Names of all undeclared properties carried by this model."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str): Unique ID of the Process Example: process-hutch-magic_flute-1_0.
- name (str): Friendly name for the process Example: MAGeCK Flute.
- description (str): Description of the process Example: MAGeCK Flute enables accurate identification of essential genes with their related biological functions.
- executor (Executor): How the workflow is executed
- child_process_ids (list[str]): IDs of pipelines that can be run downstream
- parent_process_ids (list[str]): IDs of processes that can run this pipeline
- linked_project_ids (list[str]): Projects that can run this process
- data_type (None | str | Unset): Name of the data type this pipeline produces (if it is not defined, use the name)
- category (str | Unset): Category of the process Example: Microbial Analysis.
- documentation_url (None | str | Unset): Link to process documentation Example: https://docs.cirro.bio/pipelines/catalog_targeted_sequencing/#crispr-screen-analysis.
- file_requirements_message (None | str | Unset): Description of the files to be uploaded (optional)
- pipeline_code (None | PipelineCode | Unset):
- is_tenant_wide (bool | Unset): Whether the process is shared with the tenant
- allow_multiple_sources (bool | Unset): Whether the pipeline is allowed to have multiple dataset sources
- uses_sample_sheet (bool | Unset): Whether the pipeline uses the Cirro-provided sample sheet
- custom_settings (CustomPipelineSettings | None | Unset):
- file_mapping_rules (list[FileMappingRule] | None | Unset):
40def __init__(self, id, name, description, executor, child_process_ids, parent_process_ids, linked_project_ids, data_type=attr_dict['data_type'].default, category=attr_dict['category'].default, documentation_url=attr_dict['documentation_url'].default, file_requirements_message=attr_dict['file_requirements_message'].default, pipeline_code=attr_dict['pipeline_code'].default, is_tenant_wide=attr_dict['is_tenant_wide'].default, allow_multiple_sources=attr_dict['allow_multiple_sources'].default, uses_sample_sheet=attr_dict['uses_sample_sheet'].default, custom_settings=attr_dict['custom_settings'].default, file_mapping_rules=attr_dict['file_mapping_rules'].default): 41 self.id = id 42 self.name = name 43 self.description = description 44 self.executor = executor 45 self.child_process_ids = child_process_ids 46 self.parent_process_ids = parent_process_ids 47 self.linked_project_ids = linked_project_ids 48 self.data_type = data_type 49 self.category = category 50 self.documentation_url = documentation_url 51 self.file_requirements_message = file_requirements_message 52 self.pipeline_code = pipeline_code 53 self.is_tenant_wide = is_tenant_wide 54 self.allow_multiple_sources = allow_multiple_sources 55 self.uses_sample_sheet = uses_sample_sheet 56 self.custom_settings = custom_settings 57 self.file_mapping_rules = file_mapping_rules 58 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class CustomProcessInput.
67 def to_dict(self) -> dict[str, Any]: 68 from ..models.custom_pipeline_settings import CustomPipelineSettings 69 from ..models.pipeline_code import PipelineCode 70 71 id = self.id 72 73 name = self.name 74 75 description = self.description 76 77 executor = self.executor.value 78 79 child_process_ids = self.child_process_ids 80 81 parent_process_ids = self.parent_process_ids 82 83 linked_project_ids = self.linked_project_ids 84 85 data_type: None | str | Unset 86 if isinstance(self.data_type, Unset): 87 data_type = UNSET 88 else: 89 data_type = self.data_type 90 91 category = self.category 92 93 documentation_url: None | str | Unset 94 if isinstance(self.documentation_url, Unset): 95 documentation_url = UNSET 96 else: 97 documentation_url = self.documentation_url 98 99 file_requirements_message: None | str | Unset 100 if isinstance(self.file_requirements_message, Unset): 101 file_requirements_message = UNSET 102 else: 103 file_requirements_message = self.file_requirements_message 104 105 pipeline_code: dict[str, Any] | None | Unset 106 if isinstance(self.pipeline_code, Unset): 107 pipeline_code = UNSET 108 elif isinstance(self.pipeline_code, PipelineCode): 109 pipeline_code = self.pipeline_code.to_dict() 110 else: 111 pipeline_code = self.pipeline_code 112 113 is_tenant_wide = self.is_tenant_wide 114 115 allow_multiple_sources = self.allow_multiple_sources 116 117 uses_sample_sheet = self.uses_sample_sheet 118 119 custom_settings: dict[str, Any] | None | Unset 120 if isinstance(self.custom_settings, Unset): 121 custom_settings = UNSET 122 elif isinstance(self.custom_settings, CustomPipelineSettings): 123 custom_settings = self.custom_settings.to_dict() 124 else: 125 custom_settings = self.custom_settings 126 127 file_mapping_rules: list[dict[str, Any]] | None | Unset 128 if isinstance(self.file_mapping_rules, Unset): 129 file_mapping_rules = UNSET 130 elif isinstance(self.file_mapping_rules, list): 131 file_mapping_rules = [] 132 for 
file_mapping_rules_type_0_item_data in self.file_mapping_rules: 133 file_mapping_rules_type_0_item = file_mapping_rules_type_0_item_data.to_dict() 134 file_mapping_rules.append(file_mapping_rules_type_0_item) 135 136 else: 137 file_mapping_rules = self.file_mapping_rules 138 139 field_dict: dict[str, Any] = {} 140 field_dict.update(self.additional_properties) 141 field_dict.update( 142 { 143 "id": id, 144 "name": name, 145 "description": description, 146 "executor": executor, 147 "childProcessIds": child_process_ids, 148 "parentProcessIds": parent_process_ids, 149 "linkedProjectIds": linked_project_ids, 150 } 151 ) 152 if data_type is not UNSET: 153 field_dict["dataType"] = data_type 154 if category is not UNSET: 155 field_dict["category"] = category 156 if documentation_url is not UNSET: 157 field_dict["documentationUrl"] = documentation_url 158 if file_requirements_message is not UNSET: 159 field_dict["fileRequirementsMessage"] = file_requirements_message 160 if pipeline_code is not UNSET: 161 field_dict["pipelineCode"] = pipeline_code 162 if is_tenant_wide is not UNSET: 163 field_dict["isTenantWide"] = is_tenant_wide 164 if allow_multiple_sources is not UNSET: 165 field_dict["allowMultipleSources"] = allow_multiple_sources 166 if uses_sample_sheet is not UNSET: 167 field_dict["usesSampleSheet"] = uses_sample_sheet 168 if custom_settings is not UNSET: 169 field_dict["customSettings"] = custom_settings 170 if file_mapping_rules is not UNSET: 171 field_dict["fileMappingRules"] = file_mapping_rules 172 173 return field_dict
175 @classmethod 176 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 177 from ..models.custom_pipeline_settings import CustomPipelineSettings 178 from ..models.file_mapping_rule import FileMappingRule 179 from ..models.pipeline_code import PipelineCode 180 181 d = dict(src_dict) 182 id = d.pop("id") 183 184 name = d.pop("name") 185 186 description = d.pop("description") 187 188 executor = Executor(d.pop("executor")) 189 190 child_process_ids = cast(list[str], d.pop("childProcessIds")) 191 192 parent_process_ids = cast(list[str], d.pop("parentProcessIds")) 193 194 linked_project_ids = cast(list[str], d.pop("linkedProjectIds")) 195 196 def _parse_data_type(data: object) -> None | str | Unset: 197 if data is None: 198 return data 199 if isinstance(data, Unset): 200 return data 201 return cast(None | str | Unset, data) 202 203 data_type = _parse_data_type(d.pop("dataType", UNSET)) 204 205 category = d.pop("category", UNSET) 206 207 def _parse_documentation_url(data: object) -> None | str | Unset: 208 if data is None: 209 return data 210 if isinstance(data, Unset): 211 return data 212 return cast(None | str | Unset, data) 213 214 documentation_url = _parse_documentation_url(d.pop("documentationUrl", UNSET)) 215 216 def _parse_file_requirements_message(data: object) -> None | str | Unset: 217 if data is None: 218 return data 219 if isinstance(data, Unset): 220 return data 221 return cast(None | str | Unset, data) 222 223 file_requirements_message = _parse_file_requirements_message(d.pop("fileRequirementsMessage", UNSET)) 224 225 def _parse_pipeline_code(data: object) -> None | PipelineCode | Unset: 226 if data is None: 227 return data 228 if isinstance(data, Unset): 229 return data 230 try: 231 if not isinstance(data, dict): 232 raise TypeError() 233 pipeline_code_type_1 = PipelineCode.from_dict(data) 234 235 return pipeline_code_type_1 236 except (TypeError, ValueError, AttributeError, KeyError): 237 pass 238 return cast(None | PipelineCode | Unset, data) 
239 240 pipeline_code = _parse_pipeline_code(d.pop("pipelineCode", UNSET)) 241 242 is_tenant_wide = d.pop("isTenantWide", UNSET) 243 244 allow_multiple_sources = d.pop("allowMultipleSources", UNSET) 245 246 uses_sample_sheet = d.pop("usesSampleSheet", UNSET) 247 248 def _parse_custom_settings(data: object) -> CustomPipelineSettings | None | Unset: 249 if data is None: 250 return data 251 if isinstance(data, Unset): 252 return data 253 try: 254 if not isinstance(data, dict): 255 raise TypeError() 256 custom_settings_type_1 = CustomPipelineSettings.from_dict(data) 257 258 return custom_settings_type_1 259 except (TypeError, ValueError, AttributeError, KeyError): 260 pass 261 return cast(CustomPipelineSettings | None | Unset, data) 262 263 custom_settings = _parse_custom_settings(d.pop("customSettings", UNSET)) 264 265 def _parse_file_mapping_rules(data: object) -> list[FileMappingRule] | None | Unset: 266 if data is None: 267 return data 268 if isinstance(data, Unset): 269 return data 270 try: 271 if not isinstance(data, list): 272 raise TypeError() 273 file_mapping_rules_type_0 = [] 274 _file_mapping_rules_type_0 = data 275 for file_mapping_rules_type_0_item_data in _file_mapping_rules_type_0: 276 file_mapping_rules_type_0_item = FileMappingRule.from_dict(file_mapping_rules_type_0_item_data) 277 278 file_mapping_rules_type_0.append(file_mapping_rules_type_0_item) 279 280 return file_mapping_rules_type_0 281 except (TypeError, ValueError, AttributeError, KeyError): 282 pass 283 return cast(list[FileMappingRule] | None | Unset, data) 284 285 file_mapping_rules = _parse_file_mapping_rules(d.pop("fileMappingRules", UNSET)) 286 287 custom_process_input = cls( 288 id=id, 289 name=name, 290 description=description, 291 executor=executor, 292 child_process_ids=child_process_ids, 293 parent_process_ids=parent_process_ids, 294 linked_project_ids=linked_project_ids, 295 data_type=data_type, 296 category=category, 297 documentation_url=documentation_url, 298 
file_requirements_message=file_requirements_message, 299 pipeline_code=pipeline_code, 300 is_tenant_wide=is_tenant_wide, 301 allow_multiple_sources=allow_multiple_sources, 302 uses_sample_sheet=uses_sample_sheet, 303 custom_settings=custom_settings, 304 file_mapping_rules=file_mapping_rules, 305 ) 306 307 custom_process_input.additional_properties = d 308 return custom_process_input
@_attrs_define
class Dashboard:
    """
    Attributes:
        id (str):
        name (str):
        description (str):
        process_ids (list[str]):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
        dashboard_data (DashboardDashboardData | Unset):
        info (DashboardInfo | Unset):
    """

    id: str
    name: str
    description: str
    process_ids: list[str]
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    dashboard_data: DashboardDashboardData | Unset = UNSET
    info: DashboardInfo | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-style dict; timestamps are emitted as ISO-8601 strings."""
        dashboard_data: dict[str, Any] | Unset = UNSET
        if not isinstance(self.dashboard_data, Unset):
            dashboard_data = self.dashboard_data.to_dict()

        info: dict[str, Any] | Unset = UNSET
        if not isinstance(self.info, Unset):
            info = self.info.to_dict()

        # Undeclared keys first so declared fields win on collision.
        field_dict: dict[str, Any] = dict(self.additional_properties)
        field_dict.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "processIds": self.process_ids,
                "createdBy": self.created_by,
                "createdAt": self.created_at.isoformat(),
                "updatedAt": self.updated_at.isoformat(),
            }
        )
        if dashboard_data is not UNSET:
            field_dict["dashboardData"] = dashboard_data
        if info is not UNSET:
            field_dict["info"] = info

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-style dict; leftover keys become additional_properties."""
        from ..models.dashboard_dashboard_data import DashboardDashboardData
        from ..models.dashboard_info import DashboardInfo

        d = dict(src_dict)

        _dashboard_data = d.pop("dashboardData", UNSET)
        dashboard_data: DashboardDashboardData | Unset = (
            UNSET if isinstance(_dashboard_data, Unset) else DashboardDashboardData.from_dict(_dashboard_data)
        )

        _info = d.pop("info", UNSET)
        info: DashboardInfo | Unset = UNSET if isinstance(_info, Unset) else DashboardInfo.from_dict(_info)

        dashboard = cls(
            id=d.pop("id"),
            name=d.pop("name"),
            description=d.pop("description"),
            process_ids=cast(list[str], d.pop("processIds")),
            created_by=d.pop("createdBy"),
            created_at=isoparse(d.pop("createdAt")),
            updated_at=isoparse(d.pop("updatedAt")),
            dashboard_data=dashboard_data,
            info=info,
        )

        # Unmodeled keys are preserved so the object round-trips.
        dashboard.additional_properties = d
        return dashboard

    @property
    def additional_keys(self) -> list[str]:
        """Names of all undeclared properties carried by this model."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- description (str):
- process_ids (list[str]):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
- dashboard_data (DashboardDashboardData | Unset):
- info (DashboardInfo | Unset):
32def __init__(self, id, name, description, process_ids, created_by, created_at, updated_at, dashboard_data=attr_dict['dashboard_data'].default, info=attr_dict['info'].default): 33 self.id = id 34 self.name = name 35 self.description = description 36 self.process_ids = process_ids 37 self.created_by = created_by 38 self.created_at = created_at 39 self.updated_at = updated_at 40 self.dashboard_data = dashboard_data 41 self.info = info 42 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Dashboard.
48 def to_dict(self) -> dict[str, Any]: 49 id = self.id 50 51 name = self.name 52 53 description = self.description 54 55 process_ids = self.process_ids 56 57 created_by = self.created_by 58 59 created_at = self.created_at.isoformat() 60 61 updated_at = self.updated_at.isoformat() 62 63 dashboard_data: dict[str, Any] | Unset = UNSET 64 if not isinstance(self.dashboard_data, Unset): 65 dashboard_data = self.dashboard_data.to_dict() 66 67 info: dict[str, Any] | Unset = UNSET 68 if not isinstance(self.info, Unset): 69 info = self.info.to_dict() 70 71 field_dict: dict[str, Any] = {} 72 field_dict.update(self.additional_properties) 73 field_dict.update( 74 { 75 "id": id, 76 "name": name, 77 "description": description, 78 "processIds": process_ids, 79 "createdBy": created_by, 80 "createdAt": created_at, 81 "updatedAt": updated_at, 82 } 83 ) 84 if dashboard_data is not UNSET: 85 field_dict["dashboardData"] = dashboard_data 86 if info is not UNSET: 87 field_dict["info"] = info 88 89 return field_dict
91 @classmethod 92 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 93 from ..models.dashboard_dashboard_data import DashboardDashboardData 94 from ..models.dashboard_info import DashboardInfo 95 96 d = dict(src_dict) 97 id = d.pop("id") 98 99 name = d.pop("name") 100 101 description = d.pop("description") 102 103 process_ids = cast(list[str], d.pop("processIds")) 104 105 created_by = d.pop("createdBy") 106 107 created_at = isoparse(d.pop("createdAt")) 108 109 updated_at = isoparse(d.pop("updatedAt")) 110 111 _dashboard_data = d.pop("dashboardData", UNSET) 112 dashboard_data: DashboardDashboardData | Unset 113 if isinstance(_dashboard_data, Unset): 114 dashboard_data = UNSET 115 else: 116 dashboard_data = DashboardDashboardData.from_dict(_dashboard_data) 117 118 _info = d.pop("info", UNSET) 119 info: DashboardInfo | Unset 120 if isinstance(_info, Unset): 121 info = UNSET 122 else: 123 info = DashboardInfo.from_dict(_info) 124 125 dashboard = cls( 126 id=id, 127 name=name, 128 description=description, 129 process_ids=process_ids, 130 created_by=created_by, 131 created_at=created_at, 132 updated_at=updated_at, 133 dashboard_data=dashboard_data, 134 info=info, 135 ) 136 137 dashboard.additional_properties = d 138 return dashboard
@_attrs_define
class DashboardDashboardData:
    """Free-form dashboard payload; every key is stored as an additional property."""

    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of the stored properties."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Wrap a (copied) mapping in a new instance."""
        dashboard_dashboard_data = cls()
        dashboard_dashboard_data.additional_properties = dict(src_dict)
        return dashboard_dashboard_data

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@_attrs_define
class DashboardInfo:
    """Free-form dashboard metadata; every key is stored as an additional property."""

    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of the stored properties."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Wrap a (copied) mapping in a new instance."""
        dashboard_info = cls()
        dashboard_info.additional_properties = dict(src_dict)
        return dashboard_info

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@_attrs_define
class DashboardRequest:
    """
    Attributes:
        name (str):
        description (str):
        process_ids (list[str]):
        dashboard_data (DashboardRequestDashboardData | Unset):
        info (DashboardRequestInfo | Unset):
    """

    name: str
    description: str
    process_ids: list[str]
    dashboard_data: DashboardRequestDashboardData | Unset = UNSET
    info: DashboardRequestInfo | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-style dict; unset optional sub-objects are omitted."""
        dashboard_data: dict[str, Any] | Unset = UNSET
        if not isinstance(self.dashboard_data, Unset):
            dashboard_data = self.dashboard_data.to_dict()

        info: dict[str, Any] | Unset = UNSET
        if not isinstance(self.info, Unset):
            info = self.info.to_dict()

        # Undeclared keys first so declared fields win on collision.
        field_dict: dict[str, Any] = dict(self.additional_properties)
        field_dict.update(
            {
                "name": self.name,
                "description": self.description,
                "processIds": self.process_ids,
            }
        )
        if dashboard_data is not UNSET:
            field_dict["dashboardData"] = dashboard_data
        if info is not UNSET:
            field_dict["info"] = info

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-style dict; leftover keys become additional_properties."""
        from ..models.dashboard_request_dashboard_data import DashboardRequestDashboardData
        from ..models.dashboard_request_info import DashboardRequestInfo

        d = dict(src_dict)

        _dashboard_data = d.pop("dashboardData", UNSET)
        dashboard_data: DashboardRequestDashboardData | Unset = (
            UNSET
            if isinstance(_dashboard_data, Unset)
            else DashboardRequestDashboardData.from_dict(_dashboard_data)
        )

        _info = d.pop("info", UNSET)
        info: DashboardRequestInfo | Unset = (
            UNSET if isinstance(_info, Unset) else DashboardRequestInfo.from_dict(_info)
        )

        dashboard_request = cls(
            name=d.pop("name"),
            description=d.pop("description"),
            process_ids=cast(list[str], d.pop("processIds")),
            dashboard_data=dashboard_data,
            info=info,
        )

        # Unmodeled keys are preserved so the object round-trips.
        dashboard_request.additional_properties = d
        return dashboard_request

    @property
    def additional_keys(self) -> list[str]:
        """Names of all undeclared properties carried by this model."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- description (str):
- process_ids (list[str]):
- dashboard_data (DashboardRequestDashboardData | Unset):
- info (DashboardRequestInfo | Unset):
28def __init__(self, name, description, process_ids, dashboard_data=attr_dict['dashboard_data'].default, info=attr_dict['info'].default): 29 self.name = name 30 self.description = description 31 self.process_ids = process_ids 32 self.dashboard_data = dashboard_data 33 self.info = info 34 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class DashboardRequest.
38 def to_dict(self) -> dict[str, Any]: 39 name = self.name 40 41 description = self.description 42 43 process_ids = self.process_ids 44 45 dashboard_data: dict[str, Any] | Unset = UNSET 46 if not isinstance(self.dashboard_data, Unset): 47 dashboard_data = self.dashboard_data.to_dict() 48 49 info: dict[str, Any] | Unset = UNSET 50 if not isinstance(self.info, Unset): 51 info = self.info.to_dict() 52 53 field_dict: dict[str, Any] = {} 54 field_dict.update(self.additional_properties) 55 field_dict.update( 56 { 57 "name": name, 58 "description": description, 59 "processIds": process_ids, 60 } 61 ) 62 if dashboard_data is not UNSET: 63 field_dict["dashboardData"] = dashboard_data 64 if info is not UNSET: 65 field_dict["info"] = info 66 67 return field_dict
69 @classmethod 70 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 71 from ..models.dashboard_request_dashboard_data import DashboardRequestDashboardData 72 from ..models.dashboard_request_info import DashboardRequestInfo 73 74 d = dict(src_dict) 75 name = d.pop("name") 76 77 description = d.pop("description") 78 79 process_ids = cast(list[str], d.pop("processIds")) 80 81 _dashboard_data = d.pop("dashboardData", UNSET) 82 dashboard_data: DashboardRequestDashboardData | Unset 83 if isinstance(_dashboard_data, Unset): 84 dashboard_data = UNSET 85 else: 86 dashboard_data = DashboardRequestDashboardData.from_dict(_dashboard_data) 87 88 _info = d.pop("info", UNSET) 89 info: DashboardRequestInfo | Unset 90 if isinstance(_info, Unset): 91 info = UNSET 92 else: 93 info = DashboardRequestInfo.from_dict(_info) 94 95 dashboard_request = cls( 96 name=name, 97 description=description, 98 process_ids=process_ids, 99 dashboard_data=dashboard_data, 100 info=info, 101 ) 102 103 dashboard_request.additional_properties = d 104 return dashboard_request
@_attrs_define
class DashboardRequestDashboardData:
    """Free-form dashboard-data mapping; every key lives in ``additional_properties``."""

    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of all stored properties."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Create an instance holding a copy of *src_dict*."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored properties."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@_attrs_define
class DashboardRequestInfo:
    """Free-form info mapping; every key lives in ``additional_properties``."""

    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of all stored properties."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Create an instance holding a copy of *src_dict*."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored properties."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@_attrs_define
class DataFile:
    """A single file within a dataset.

    Attributes:
        path (str):
        metadata (DataFileMetadata):
    """

    path: str
    metadata: DataFileMetadata
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; pass-through extras first so declared fields win."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["path"] = self.path
        serialized["metadata"] = self.metadata.to_dict()
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a DataFile from a decoded JSON mapping; unknown keys are preserved."""
        from ..models.data_file_metadata import DataFileMetadata

        remaining = dict(src_dict)
        data_file = cls(
            path=remaining.pop("path"),
            metadata=DataFileMetadata.from_dict(remaining.pop("metadata")),
        )
        data_file.additional_properties = remaining
        return data_file

    @property
    def additional_keys(self) -> list[str]:
        """Names of the undeclared (pass-through) properties."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- path (str):
- metadata (DataFileMetadata):
# NOTE(review): attrs-rendered __init__ for DataFile;
# `__attr_factory_additional_properties` is injected by attrs at class-build
# time and is not a resolvable module-level name.
def __init__(self, path, metadata):
    """Method generated by attrs for class DataFile."""
    self.path = path
    self.metadata = metadata
    # attrs factory: a fresh dict per instance for undeclared JSON keys
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class DataFile.
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a DataFile from a decoded JSON mapping.

    Keys not consumed here are preserved in ``additional_properties``.
    """
    from ..models.data_file_metadata import DataFileMetadata

    d = dict(src_dict)  # copy: pops must not mutate the caller's mapping
    path = d.pop("path")

    metadata = DataFileMetadata.from_dict(d.pop("metadata"))

    data_file = cls(
        path=path,
        metadata=metadata,
    )

    data_file.additional_properties = d
    return data_file
@_attrs_define
class DataFileMetadata:
    """Free-form file-metadata mapping; every key lives in ``additional_properties``."""

    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of all stored properties."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Create an instance holding a copy of *src_dict*."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored properties."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@_attrs_define
class Dataset:
    """Summary view of a dataset as returned by the API.

    Attributes:
        id (str):
        name (str):
        description (str):
        project_id (str):
        process_id (str):
        source_dataset_ids (list[str]):
        status (Status):
        tags (list[Tag]):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
    """

    id: str
    name: str
    description: str
    project_id: str
    process_id: str
    source_dataset_ids: list[str]
    status: Status
    tags: list[Tag]
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    # Holds any JSON keys not declared above so round-tripping preserves them.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict with camelCase keys."""
        id = self.id

        name = self.name

        description = self.description

        project_id = self.project_id

        process_id = self.process_id

        source_dataset_ids = self.source_dataset_ids

        status = self.status.value

        tags = []
        for tags_item_data in self.tags:
            tags_item = tags_item_data.to_dict()
            tags.append(tags_item)

        created_by = self.created_by

        # Datetimes are emitted as ISO-8601 strings.
        created_at = self.created_at.isoformat()

        updated_at = self.updated_at.isoformat()

        field_dict: dict[str, Any] = {}
        # Pass-through extras first so declared fields win on key collision.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
                "description": description,
                "projectId": project_id,
                "processId": process_id,
                "sourceDatasetIds": source_dataset_ids,
                "status": status,
                "tags": tags,
                "createdBy": created_by,
                "createdAt": created_at,
                "updatedAt": updated_at,
            }
        )

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a Dataset from a decoded JSON mapping.

        Keys not consumed here are preserved in ``additional_properties``.
        """
        from ..models.tag import Tag

        d = dict(src_dict)  # copy: pops must not mutate the caller's mapping
        id = d.pop("id")

        name = d.pop("name")

        description = d.pop("description")

        project_id = d.pop("projectId")

        process_id = d.pop("processId")

        source_dataset_ids = cast(list[str], d.pop("sourceDatasetIds"))

        status = Status(d.pop("status"))

        tags = []
        _tags = d.pop("tags")
        for tags_item_data in _tags:
            tags_item = Tag.from_dict(tags_item_data)

            tags.append(tags_item)

        created_by = d.pop("createdBy")

        created_at = isoparse(d.pop("createdAt"))

        updated_at = isoparse(d.pop("updatedAt"))

        dataset = cls(
            id=id,
            name=name,
            description=description,
            project_id=project_id,
            process_id=process_id,
            source_dataset_ids=source_dataset_ids,
            status=status,
            tags=tags,
            created_by=created_by,
            created_at=created_at,
            updated_at=updated_at,
        )

        dataset.additional_properties = d
        return dataset

    @property
    def additional_keys(self) -> list[str]:
        """Names of the undeclared (pass-through) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- description (str):
- project_id (str):
- process_id (str):
- source_dataset_ids (list[str]):
- status (Status):
- tags (list[Tag]):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
# NOTE(review): attrs-rendered __init__ for Dataset;
# `__attr_factory_additional_properties` is injected by attrs at class-build
# time and is not a resolvable module-level name.
def __init__(self, id, name, description, project_id, process_id, source_dataset_ids, status, tags, created_by, created_at, updated_at):
    """Method generated by attrs for class Dataset."""
    self.id = id
    self.name = name
    self.description = description
    self.project_id = project_id
    self.process_id = process_id
    self.source_dataset_ids = source_dataset_ids
    self.status = status
    self.tags = tags
    self.created_by = created_by
    self.created_at = created_at
    self.updated_at = updated_at
    # attrs factory: a fresh dict per instance for undeclared JSON keys
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Dataset.
def to_dict(self) -> dict[str, Any]:
    """Serialize this Dataset to a JSON-ready dict with camelCase keys."""
    id = self.id

    name = self.name

    description = self.description

    project_id = self.project_id

    process_id = self.process_id

    source_dataset_ids = self.source_dataset_ids

    status = self.status.value

    tags = []
    for tags_item_data in self.tags:
        tags_item = tags_item_data.to_dict()
        tags.append(tags_item)

    created_by = self.created_by

    # Datetimes are emitted as ISO-8601 strings.
    created_at = self.created_at.isoformat()

    updated_at = self.updated_at.isoformat()

    field_dict: dict[str, Any] = {}
    # Pass-through extras first so declared fields win on key collision.
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "id": id,
            "name": name,
            "description": description,
            "projectId": project_id,
            "processId": process_id,
            "sourceDatasetIds": source_dataset_ids,
            "status": status,
            "tags": tags,
            "createdBy": created_by,
            "createdAt": created_at,
            "updatedAt": updated_at,
        }
    )

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a Dataset from a decoded JSON mapping.

    Keys not consumed here are preserved in ``additional_properties``.
    """
    from ..models.tag import Tag

    d = dict(src_dict)  # copy: pops must not mutate the caller's mapping
    id = d.pop("id")

    name = d.pop("name")

    description = d.pop("description")

    project_id = d.pop("projectId")

    process_id = d.pop("processId")

    source_dataset_ids = cast(list[str], d.pop("sourceDatasetIds"))

    status = Status(d.pop("status"))

    tags = []
    _tags = d.pop("tags")
    for tags_item_data in _tags:
        tags_item = Tag.from_dict(tags_item_data)

        tags.append(tags_item)

    created_by = d.pop("createdBy")

    # isoparse: lenient ISO-8601 parsing (dateutil)
    created_at = isoparse(d.pop("createdAt"))

    updated_at = isoparse(d.pop("updatedAt"))

    dataset = cls(
        id=id,
        name=name,
        description=description,
        project_id=project_id,
        process_id=process_id,
        source_dataset_ids=source_dataset_ids,
        status=status,
        tags=tags,
        created_by=created_by,
        created_at=created_at,
        updated_at=updated_at,
    )

    dataset.additional_properties = d
    return dataset
@_attrs_define
class DatasetAssetsManifest:
    """Manifest of the renderable assets attached to a dataset.

    Attributes:
        domain (str | Unset): Base URL for files Example: s3://project-1a1a/datasets/1a1a.
        files (list[FileEntry] | Unset): List of files in the dataset, including metadata
        total_files (int | Unset): Total number of files in the dataset, used for pagination
        viz (list[DatasetViz] | Unset): List of viz to render for the dataset
        tables (list[Table] | Unset): List of web optimized tables for the dataset
        artifacts (list[Artifact] | Unset): Artifacts associated with the dataset
    """

    # All fields are optional; UNSET means "absent from the payload".
    domain: str | Unset = UNSET
    files: list[FileEntry] | Unset = UNSET
    total_files: int | Unset = UNSET
    viz: list[DatasetViz] | Unset = UNSET
    tables: list[Table] | Unset = UNSET
    artifacts: list[Artifact] | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict, omitting UNSET fields."""
        domain = self.domain

        files: list[dict[str, Any]] | Unset = UNSET
        if not isinstance(self.files, Unset):
            files = []
            for files_item_data in self.files:
                files_item = files_item_data.to_dict()
                files.append(files_item)

        total_files = self.total_files

        viz: list[dict[str, Any]] | Unset = UNSET
        if not isinstance(self.viz, Unset):
            viz = []
            for viz_item_data in self.viz:
                viz_item = viz_item_data.to_dict()
                viz.append(viz_item)

        tables: list[dict[str, Any]] | Unset = UNSET
        if not isinstance(self.tables, Unset):
            tables = []
            for tables_item_data in self.tables:
                tables_item = tables_item_data.to_dict()
                tables.append(tables_item)

        artifacts: list[dict[str, Any]] | Unset = UNSET
        if not isinstance(self.artifacts, Unset):
            artifacts = []
            for artifacts_item_data in self.artifacts:
                artifacts_item = artifacts_item_data.to_dict()
                artifacts.append(artifacts_item)

        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update({})  # no required fields for this model
        if domain is not UNSET:
            field_dict["domain"] = domain
        if files is not UNSET:
            field_dict["files"] = files
        if total_files is not UNSET:
            field_dict["totalFiles"] = total_files
        if viz is not UNSET:
            field_dict["viz"] = viz
        if tables is not UNSET:
            field_dict["tables"] = tables
        if artifacts is not UNSET:
            field_dict["artifacts"] = artifacts

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a DatasetAssetsManifest from a decoded JSON mapping.

        Keys not consumed here are preserved in ``additional_properties``.
        """
        from ..models.artifact import Artifact
        from ..models.dataset_viz import DatasetViz
        from ..models.file_entry import FileEntry
        from ..models.table import Table

        d = dict(src_dict)  # copy: pops must not mutate the caller's mapping
        domain = d.pop("domain", UNSET)

        # UNSET is a singleton sentinel, so identity checks are sufficient here.
        _files = d.pop("files", UNSET)
        files: list[FileEntry] | Unset = UNSET
        if _files is not UNSET:
            files = []
            for files_item_data in _files:
                files_item = FileEntry.from_dict(files_item_data)

                files.append(files_item)

        total_files = d.pop("totalFiles", UNSET)

        _viz = d.pop("viz", UNSET)
        viz: list[DatasetViz] | Unset = UNSET
        if _viz is not UNSET:
            viz = []
            for viz_item_data in _viz:
                viz_item = DatasetViz.from_dict(viz_item_data)

                viz.append(viz_item)

        _tables = d.pop("tables", UNSET)
        tables: list[Table] | Unset = UNSET
        if _tables is not UNSET:
            tables = []
            for tables_item_data in _tables:
                tables_item = Table.from_dict(tables_item_data)

                tables.append(tables_item)

        _artifacts = d.pop("artifacts", UNSET)
        artifacts: list[Artifact] | Unset = UNSET
        if _artifacts is not UNSET:
            artifacts = []
            for artifacts_item_data in _artifacts:
                artifacts_item = Artifact.from_dict(artifacts_item_data)

                artifacts.append(artifacts_item)

        dataset_assets_manifest = cls(
            domain=domain,
            files=files,
            total_files=total_files,
            viz=viz,
            tables=tables,
            artifacts=artifacts,
        )

        dataset_assets_manifest.additional_properties = d
        return dataset_assets_manifest

    @property
    def additional_keys(self) -> list[str]:
        """Names of the undeclared (pass-through) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- domain (str | Unset): Base URL for files Example: s3://project-1a1a/datasets/1a1a.
- files (list[FileEntry] | Unset): List of files in the dataset, including metadata
- total_files (int | Unset): Total number of files in the dataset, used for pagination
- viz (list[DatasetViz] | Unset): List of viz to render for the dataset
- tables (list[Table] | Unset): List of web optimized tables for the dataset
- artifacts (list[Artifact] | Unset): Artifacts associated with the dataset
# NOTE(review): attrs-rendered __init__ for DatasetAssetsManifest. `attr_dict`
# and `__attr_factory_additional_properties` are injected by attrs at
# class-build time and are not resolvable as plain module-level names.
def __init__(self, domain=attr_dict['domain'].default, files=attr_dict['files'].default, total_files=attr_dict['total_files'].default, viz=attr_dict['viz'].default, tables=attr_dict['tables'].default, artifacts=attr_dict['artifacts'].default):
    """Method generated by attrs for class DatasetAssetsManifest."""
    self.domain = domain
    self.files = files
    self.total_files = total_files
    self.viz = viz
    self.tables = tables
    self.artifacts = artifacts
    # attrs factory: a fresh dict per instance for undeclared JSON keys
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class DatasetAssetsManifest.
def to_dict(self) -> dict[str, Any]:
    """Serialize this DatasetAssetsManifest to a JSON-ready dict, omitting UNSET fields."""
    domain = self.domain

    files: list[dict[str, Any]] | Unset = UNSET
    if not isinstance(self.files, Unset):
        files = []
        for files_item_data in self.files:
            files_item = files_item_data.to_dict()
            files.append(files_item)

    total_files = self.total_files

    viz: list[dict[str, Any]] | Unset = UNSET
    if not isinstance(self.viz, Unset):
        viz = []
        for viz_item_data in self.viz:
            viz_item = viz_item_data.to_dict()
            viz.append(viz_item)

    tables: list[dict[str, Any]] | Unset = UNSET
    if not isinstance(self.tables, Unset):
        tables = []
        for tables_item_data in self.tables:
            tables_item = tables_item_data.to_dict()
            tables.append(tables_item)

    artifacts: list[dict[str, Any]] | Unset = UNSET
    if not isinstance(self.artifacts, Unset):
        artifacts = []
        for artifacts_item_data in self.artifacts:
            artifacts_item = artifacts_item_data.to_dict()
            artifacts.append(artifacts_item)

    field_dict: dict[str, Any] = {}
    # Pass-through extras first; this model has no required fields.
    field_dict.update(self.additional_properties)
    field_dict.update({})
    if domain is not UNSET:
        field_dict["domain"] = domain
    if files is not UNSET:
        field_dict["files"] = files
    if total_files is not UNSET:
        field_dict["totalFiles"] = total_files
    if viz is not UNSET:
        field_dict["viz"] = viz
    if tables is not UNSET:
        field_dict["tables"] = tables
    if artifacts is not UNSET:
        field_dict["artifacts"] = artifacts

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a DatasetAssetsManifest from a decoded JSON mapping.

    Keys not consumed here are preserved in ``additional_properties``.
    """
    from ..models.artifact import Artifact
    from ..models.dataset_viz import DatasetViz
    from ..models.file_entry import FileEntry
    from ..models.table import Table

    d = dict(src_dict)  # copy: pops must not mutate the caller's mapping
    domain = d.pop("domain", UNSET)

    # UNSET is a singleton sentinel, so identity checks are sufficient here.
    _files = d.pop("files", UNSET)
    files: list[FileEntry] | Unset = UNSET
    if _files is not UNSET:
        files = []
        for files_item_data in _files:
            files_item = FileEntry.from_dict(files_item_data)

            files.append(files_item)

    total_files = d.pop("totalFiles", UNSET)

    _viz = d.pop("viz", UNSET)
    viz: list[DatasetViz] | Unset = UNSET
    if _viz is not UNSET:
        viz = []
        for viz_item_data in _viz:
            viz_item = DatasetViz.from_dict(viz_item_data)

            viz.append(viz_item)

    _tables = d.pop("tables", UNSET)
    tables: list[Table] | Unset = UNSET
    if _tables is not UNSET:
        tables = []
        for tables_item_data in _tables:
            tables_item = Table.from_dict(tables_item_data)

            tables.append(tables_item)

    _artifacts = d.pop("artifacts", UNSET)
    artifacts: list[Artifact] | Unset = UNSET
    if _artifacts is not UNSET:
        artifacts = []
        for artifacts_item_data in _artifacts:
            artifacts_item = Artifact.from_dict(artifacts_item_data)

            artifacts.append(artifacts_item)

    dataset_assets_manifest = cls(
        domain=domain,
        files=files,
        total_files=total_files,
        viz=viz,
        tables=tables,
        artifacts=artifacts,
    )

    dataset_assets_manifest.additional_properties = d
    return dataset_assets_manifest
@_attrs_define
class DatasetCondition:
    """One filter condition applied to datasets.

    Attributes:
        field (DatasetConditionField):
        value (str):
    """

    field: DatasetConditionField
    value: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; pass-through extras first so declared fields win."""
        result: dict[str, Any] = dict(self.additional_properties)
        result["field"] = self.field.value
        result["value"] = self.value
        return result

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a DatasetCondition from a decoded JSON mapping; unknown keys are preserved."""
        remaining = dict(src_dict)
        condition = cls(
            field=DatasetConditionField(remaining.pop("field")),
            value=remaining.pop("value"),
        )
        condition.additional_properties = remaining
        return condition

    @property
    def additional_keys(self) -> list[str]:
        """Names of the undeclared (pass-through) properties."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- field (DatasetConditionField):
- value (str):
# NOTE(review): attrs-rendered __init__ for DatasetCondition;
# `__attr_factory_additional_properties` is injected by attrs at class-build
# time and is not a resolvable module-level name.
def __init__(self, field, value):
    """Method generated by attrs for class DatasetCondition."""
    self.field = field
    self.value = value
    # attrs factory: a fresh dict per instance for undeclared JSON keys
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class DatasetCondition.
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a DatasetCondition from a decoded JSON mapping.

    Keys not consumed here are preserved in ``additional_properties``.
    """
    d = dict(src_dict)  # copy: pops must not mutate the caller's mapping
    field = DatasetConditionField(d.pop("field"))

    value = d.pop("value")

    dataset_condition = cls(
        field=field,
        value=value,
    )

    dataset_condition.additional_properties = d
    return dataset_condition
class DatasetConditionField(str, Enum):
    """Field that a DatasetCondition may match against.

    Unrecognized server values coerce to ``UNKNOWN`` instead of raising,
    keeping the client forward-compatible with new server-side fields.
    """

    CREATED_BY = "CREATED_BY"
    DATASET_ID = "DATASET_ID"
    PROCESS_ID = "PROCESS_ID"
    TAG = "TAG"
    # Fallback for values the client does not know; never send it in requests.
    UNKNOWN = "UNKNOWN"

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Enum lookup protocol: called with the value that failed lookup.
        # Return the member directly instead of re-invoking cls(...), which
        # would redundantly perform a second lookup.
        return cls.UNKNOWN
str(object='') -> str
str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class DatasetDetail:
    """Full detail view of a dataset, including provenance and sharing info.

    Attributes:
        id (str):
        name (str):
        description (str):
        s3 (str):
        process_id (str):
        project_id (str):
        source_dataset_ids (list[str]):
        source_datasets (list[NamedItem]):
        source_sample_ids (list[str]):
        source_sample_files_map (DatasetDetailSourceSampleFilesMap): Keys are sampleIds, and the lists are file paths to
            include.
        status (Status):
        status_message (str):
        tags (list[Tag]):
        params (DatasetDetailParams):
        info (DatasetDetailInfo):
        is_view_restricted (bool):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
        originating_project_id (str | Unset): The originating project ID might be different if the dataset was shared
            from another project.
        share (NamedItem | None | Unset):
        total_size_bytes (int | None | Unset): Total size of dataset files (in bytes)
    """

    id: str
    name: str
    description: str
    s3: str
    process_id: str
    project_id: str
    source_dataset_ids: list[str]
    source_datasets: list[NamedItem]
    source_sample_ids: list[str]
    source_sample_files_map: DatasetDetailSourceSampleFilesMap
    status: Status
    status_message: str
    tags: list[Tag]
    params: DatasetDetailParams
    info: DatasetDetailInfo
    is_view_restricted: bool
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    # Optional fields: UNSET = absent from payload; None is a distinct, valid value.
    originating_project_id: str | Unset = UNSET
    share: NamedItem | None | Unset = UNSET
    total_size_bytes: int | None | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict with camelCase keys, omitting UNSET fields."""
        from ..models.named_item import NamedItem

        id = self.id

        name = self.name

        description = self.description

        s3 = self.s3

        process_id = self.process_id

        project_id = self.project_id

        source_dataset_ids = self.source_dataset_ids

        source_datasets = []
        for source_datasets_item_data in self.source_datasets:
            source_datasets_item = source_datasets_item_data.to_dict()
            source_datasets.append(source_datasets_item)

        source_sample_ids = self.source_sample_ids

        source_sample_files_map = self.source_sample_files_map.to_dict()

        status = self.status.value

        status_message = self.status_message

        tags = []
        for tags_item_data in self.tags:
            tags_item = tags_item_data.to_dict()
            tags.append(tags_item)

        params = self.params.to_dict()

        info = self.info.to_dict()

        is_view_restricted = self.is_view_restricted

        created_by = self.created_by

        # Datetimes are emitted as ISO-8601 strings.
        created_at = self.created_at.isoformat()

        updated_at = self.updated_at.isoformat()

        originating_project_id = self.originating_project_id

        # share is a three-way union: UNSET (omit), NamedItem (serialize), or None.
        share: dict[str, Any] | None | Unset
        if isinstance(self.share, Unset):
            share = UNSET
        elif isinstance(self.share, NamedItem):
            share = self.share.to_dict()
        else:
            share = self.share

        total_size_bytes: int | None | Unset
        if isinstance(self.total_size_bytes, Unset):
            total_size_bytes = UNSET
        else:
            total_size_bytes = self.total_size_bytes

        field_dict: dict[str, Any] = {}
        # Pass-through extras first so declared fields win on key collision.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
                "description": description,
                "s3": s3,
                "processId": process_id,
                "projectId": project_id,
                "sourceDatasetIds": source_dataset_ids,
                "sourceDatasets": source_datasets,
                "sourceSampleIds": source_sample_ids,
                "sourceSampleFilesMap": source_sample_files_map,
                "status": status,
                "statusMessage": status_message,
                "tags": tags,
                "params": params,
                "info": info,
                "isViewRestricted": is_view_restricted,
                "createdBy": created_by,
                "createdAt": created_at,
                "updatedAt": updated_at,
            }
        )
        if originating_project_id is not UNSET:
            field_dict["originatingProjectId"] = originating_project_id
        if share is not UNSET:
            field_dict["share"] = share
        if total_size_bytes is not UNSET:
            field_dict["totalSizeBytes"] = total_size_bytes

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a DatasetDetail from a decoded JSON mapping.

        Keys not consumed here are preserved in ``additional_properties``.
        """
        from ..models.dataset_detail_info import DatasetDetailInfo
        from ..models.dataset_detail_params import DatasetDetailParams
        from ..models.dataset_detail_source_sample_files_map import DatasetDetailSourceSampleFilesMap
        from ..models.named_item import NamedItem
        from ..models.tag import Tag

        d = dict(src_dict)  # copy: pops must not mutate the caller's mapping
        id = d.pop("id")

        name = d.pop("name")

        description = d.pop("description")

        s3 = d.pop("s3")

        process_id = d.pop("processId")

        project_id = d.pop("projectId")

        source_dataset_ids = cast(list[str], d.pop("sourceDatasetIds"))

        source_datasets = []
        _source_datasets = d.pop("sourceDatasets")
        for source_datasets_item_data in _source_datasets:
            source_datasets_item = NamedItem.from_dict(source_datasets_item_data)

            source_datasets.append(source_datasets_item)

        source_sample_ids = cast(list[str], d.pop("sourceSampleIds"))

        source_sample_files_map = DatasetDetailSourceSampleFilesMap.from_dict(d.pop("sourceSampleFilesMap"))

        status = Status(d.pop("status"))

        status_message = d.pop("statusMessage")

        tags = []
        _tags = d.pop("tags")
        for tags_item_data in _tags:
            tags_item = Tag.from_dict(tags_item_data)

            tags.append(tags_item)

        params = DatasetDetailParams.from_dict(d.pop("params"))

        info = DatasetDetailInfo.from_dict(d.pop("info"))

        is_view_restricted = d.pop("isViewRestricted")

        created_by = d.pop("createdBy")

        created_at = isoparse(d.pop("createdAt"))

        updated_at = isoparse(d.pop("updatedAt"))

        originating_project_id = d.pop("originatingProjectId", UNSET)

        def _parse_share(data: object) -> NamedItem | None | Unset:
            # Resolve the NamedItem | None | Unset union: try the model parse,
            # fall back to passing the raw value through.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                share_type_1 = NamedItem.from_dict(data)

                return share_type_1
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(NamedItem | None | Unset, data)

        share = _parse_share(d.pop("share", UNSET))

        def _parse_total_size_bytes(data: object) -> int | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(int | None | Unset, data)

        total_size_bytes = _parse_total_size_bytes(d.pop("totalSizeBytes", UNSET))

        dataset_detail = cls(
            id=id,
            name=name,
            description=description,
            s3=s3,
            process_id=process_id,
            project_id=project_id,
            source_dataset_ids=source_dataset_ids,
            source_datasets=source_datasets,
            source_sample_ids=source_sample_ids,
            source_sample_files_map=source_sample_files_map,
            status=status,
            status_message=status_message,
            tags=tags,
            params=params,
            info=info,
            is_view_restricted=is_view_restricted,
            created_by=created_by,
            created_at=created_at,
            updated_at=updated_at,
            originating_project_id=originating_project_id,
            share=share,
            total_size_bytes=total_size_bytes,
        )

        dataset_detail.additional_properties = d
        return dataset_detail

    @property
    def additional_keys(self) -> list[str]:
        """Names of the undeclared (pass-through) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- description (str):
- s3 (str):
- process_id (str):
- project_id (str):
- source_dataset_ids (list[str]):
- source_datasets (list[NamedItem]):
- source_sample_ids (list[str]):
- source_sample_files_map (DatasetDetailSourceSampleFilesMap): Keys are sampleIds, and the lists are file paths to include.
- status (Status):
- status_message (str):
- tags (list[Tag]):
- params (DatasetDetailParams):
- info (DatasetDetailInfo):
- is_view_restricted (bool):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
- originating_project_id (str | Unset): The originating project ID might be different if the dataset was shared from another project.
- share (NamedItem | None | Unset):
- total_size_bytes (int | None | Unset): Total size of dataset files (in bytes)
# NOTE(review): attrs-rendered __init__ for DatasetDetail. `attr_dict` and
# `__attr_factory_additional_properties` are injected by attrs at class-build
# time and are not resolvable as plain module-level names.
def __init__(self, id, name, description, s3, process_id, project_id, source_dataset_ids, source_datasets, source_sample_ids, source_sample_files_map, status, status_message, tags, params, info, is_view_restricted, created_by, created_at, updated_at, originating_project_id=attr_dict['originating_project_id'].default, share=attr_dict['share'].default, total_size_bytes=attr_dict['total_size_bytes'].default):
    """Method generated by attrs for class DatasetDetail."""
    self.id = id
    self.name = name
    self.description = description
    self.s3 = s3
    self.process_id = process_id
    self.project_id = project_id
    self.source_dataset_ids = source_dataset_ids
    self.source_datasets = source_datasets
    self.source_sample_ids = source_sample_ids
    self.source_sample_files_map = source_sample_files_map
    self.status = status
    self.status_message = status_message
    self.tags = tags
    self.params = params
    self.info = info
    self.is_view_restricted = is_view_restricted
    self.created_by = created_by
    self.created_at = created_at
    self.updated_at = updated_at
    self.originating_project_id = originating_project_id
    self.share = share
    self.total_size_bytes = total_size_bytes
    # attrs factory: a fresh dict per instance for undeclared JSON keys
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class DatasetDetail.
def to_dict(self) -> dict[str, Any]:
    """Serialize this DatasetDetail to a JSON-compatible dict (camelCase keys).

    Required fields are always emitted; optional fields (originatingProjectId,
    share, totalSizeBytes) are omitted when UNSET. Extra keys stored in
    ``additional_properties`` are merged in first, so declared fields win.
    """
    from ..models.named_item import NamedItem

    source_datasets = [item.to_dict() for item in self.source_datasets]
    tags = [item.to_dict() for item in self.tags]

    # share is a union: UNSET passes through, NamedItem serializes, None stays None.
    share: dict[str, Any] | None | Unset
    if isinstance(self.share, Unset):
        share = UNSET
    elif isinstance(self.share, NamedItem):
        share = self.share.to_dict()
    else:
        share = self.share

    total_size_bytes = UNSET if isinstance(self.total_size_bytes, Unset) else self.total_size_bytes

    field_dict: dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "id": self.id,
            "name": self.name,
            "description": self.description,
            "s3": self.s3,
            "processId": self.process_id,
            "projectId": self.project_id,
            "sourceDatasetIds": self.source_dataset_ids,
            "sourceDatasets": source_datasets,
            "sourceSampleIds": self.source_sample_ids,
            "sourceSampleFilesMap": self.source_sample_files_map.to_dict(),
            "status": self.status.value,
            "statusMessage": self.status_message,
            "tags": tags,
            "params": self.params.to_dict(),
            "info": self.info.to_dict(),
            "isViewRestricted": self.is_view_restricted,
            "createdBy": self.created_by,
            "createdAt": self.created_at.isoformat(),
            "updatedAt": self.updated_at.isoformat(),
        }
    )
    if self.originating_project_id is not UNSET:
        field_dict["originatingProjectId"] = self.originating_project_id
    if share is not UNSET:
        field_dict["share"] = share
    if total_size_bytes is not UNSET:
        field_dict["totalSizeBytes"] = total_size_bytes

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a DatasetDetail from a JSON-compatible dict (camelCase keys).

    Required keys raise KeyError when missing; optional keys default to
    UNSET. Keys not consumed by declared fields are kept in
    ``additional_properties``.
    """
    from ..models.dataset_detail_info import DatasetDetailInfo
    from ..models.dataset_detail_params import DatasetDetailParams
    from ..models.dataset_detail_source_sample_files_map import DatasetDetailSourceSampleFilesMap
    from ..models.named_item import NamedItem
    from ..models.tag import Tag

    d = dict(src_dict)

    def _parse_share(data: object) -> NamedItem | None | Unset:
        # None/UNSET pass through; a dict becomes a NamedItem; anything else
        # is returned unchanged (cast is a typing no-op at runtime).
        if data is None or isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, dict):
                raise TypeError()
            return NamedItem.from_dict(data)
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(NamedItem | None | Unset, data)

    dataset_detail = cls(
        id=d.pop("id"),
        name=d.pop("name"),
        description=d.pop("description"),
        s3=d.pop("s3"),
        process_id=d.pop("processId"),
        project_id=d.pop("projectId"),
        source_dataset_ids=cast(list[str], d.pop("sourceDatasetIds")),
        source_datasets=[NamedItem.from_dict(item) for item in d.pop("sourceDatasets")],
        source_sample_ids=cast(list[str], d.pop("sourceSampleIds")),
        source_sample_files_map=DatasetDetailSourceSampleFilesMap.from_dict(d.pop("sourceSampleFilesMap")),
        status=Status(d.pop("status")),
        status_message=d.pop("statusMessage"),
        tags=[Tag.from_dict(item) for item in d.pop("tags")],
        params=DatasetDetailParams.from_dict(d.pop("params")),
        info=DatasetDetailInfo.from_dict(d.pop("info")),
        is_view_restricted=d.pop("isViewRestricted"),
        created_by=d.pop("createdBy"),
        created_at=isoparse(d.pop("createdAt")),
        updated_at=isoparse(d.pop("updatedAt")),
        originating_project_id=d.pop("originatingProjectId", UNSET),
        share=_parse_share(d.pop("share", UNSET)),
        total_size_bytes=cast(int | None | Unset, d.pop("totalSizeBytes", UNSET)),
    )

    dataset_detail.additional_properties = d
    return dataset_detail
@_attrs_define
class DatasetDetailInfo:
    """Free-form info payload; every key is kept in ``additional_properties``."""

    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of the stored properties."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance that stores every key of *src_dict* as-is."""
        dataset_detail_info = cls()
        dataset_detail_info.additional_properties = dict(src_dict)
        return dataset_detail_info

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@_attrs_define
class DatasetDetailParams:
    """Free-form params payload; every key is kept in ``additional_properties``."""

    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of the stored properties."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance that stores every key of *src_dict* as-is."""
        dataset_detail_params = cls()
        dataset_detail_params.additional_properties = dict(src_dict)
        return dataset_detail_params

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@_attrs_define
class DatasetDetailSourceSampleFilesMap:
    """Keys are sampleIds, and the lists are file paths to include."""

    additional_properties: dict[str, list[str]] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of the sampleId -> file-paths mapping."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build the map from *src_dict*; each value is treated as ``list[str]``."""
        mapping = cls()
        mapping.additional_properties = {
            sample_id: cast(list[str], paths) for sample_id, paths in dict(src_dict).items()
        }
        return mapping

    @property
    def additional_keys(self) -> list[str]:
        """All sampleIds present in the map."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> list[str]:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: list[str]) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Keys are sampleIds, and the lists are file paths to include.
Method generated by attrs for class DatasetDetailSourceSampleFilesMap.
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build the map from *src_dict*, treating each value as ``list[str]``."""
    mapping = cls()
    mapping.additional_properties = {
        name: cast(list[str], value) for name, value in dict(src_dict).items()
    }
    return mapping
@_attrs_define
class DatasetViz:
    """
    Attributes:
        path (str | Unset): Path to viz configuration, if applicable
        name (str | Unset): Name of viz
        desc (str | Unset): Description of viz
        type_ (str | Unset): Type of viz Example: vitescce.
        config (DatasetVizConfig | Unset): Config or path to config used to render viz
    """

    path: str | Unset = UNSET
    name: str | Unset = UNSET
    desc: str | Unset = UNSET
    type_: str | Unset = UNSET
    config: DatasetVizConfig | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a dict, omitting fields that are UNSET."""
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        if self.path is not UNSET:
            field_dict["path"] = self.path
        if self.name is not UNSET:
            field_dict["name"] = self.name
        if self.desc is not UNSET:
            field_dict["desc"] = self.desc
        if self.type_ is not UNSET:
            field_dict["type"] = self.type_
        if not isinstance(self.config, Unset):
            field_dict["config"] = self.config.to_dict()

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a DatasetViz from a dict; unknown keys go to additional_properties."""
        from ..models.dataset_viz_config import DatasetVizConfig

        d = dict(src_dict)
        _config = d.pop("config", UNSET)
        config: DatasetVizConfig | Unset = (
            UNSET if isinstance(_config, Unset) else DatasetVizConfig.from_dict(_config)
        )

        dataset_viz = cls(
            path=d.pop("path", UNSET),
            name=d.pop("name", UNSET),
            desc=d.pop("desc", UNSET),
            type_=d.pop("type", UNSET),
            config=config,
        )
        dataset_viz.additional_properties = d
        return dataset_viz

    @property
    def additional_keys(self) -> list[str]:
        """Names of all extra (undeclared) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- path (str | Unset): Path to viz configuration, if applicable
- name (str | Unset): Name of viz
- desc (str | Unset): Description of viz
- type_ (str | Unset): Type of viz. Example: vitescce.
- config (DatasetVizConfig | Unset): Config or path to config used to render viz
def __init__(self, path=attr_dict['path'].default, name=attr_dict['name'].default,
             desc=attr_dict['desc'].default, type_=attr_dict['type_'].default,
             config=attr_dict['config'].default):
    """Initializer generated by attrs for DatasetViz; all fields are optional."""
    self.path = path
    self.name = name
    self.desc = desc
    self.type_ = type_
    self.config = config
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class DatasetViz.
def to_dict(self) -> dict[str, Any]:
    """Serialize to a dict, omitting fields that are UNSET."""
    field_dict: dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    if self.path is not UNSET:
        field_dict["path"] = self.path
    if self.name is not UNSET:
        field_dict["name"] = self.name
    if self.desc is not UNSET:
        field_dict["desc"] = self.desc
    if self.type_ is not UNSET:
        field_dict["type"] = self.type_
    if not isinstance(self.config, Unset):
        field_dict["config"] = self.config.to_dict()

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a DatasetViz from a dict; unknown keys go to additional_properties."""
    from ..models.dataset_viz_config import DatasetVizConfig

    d = dict(src_dict)
    _config = d.pop("config", UNSET)
    config: DatasetVizConfig | Unset = (
        UNSET if isinstance(_config, Unset) else DatasetVizConfig.from_dict(_config)
    )

    dataset_viz = cls(
        path=d.pop("path", UNSET),
        name=d.pop("name", UNSET),
        desc=d.pop("desc", UNSET),
        type_=d.pop("type", UNSET),
        config=config,
    )
    dataset_viz.additional_properties = d
    return dataset_viz
@_attrs_define
class DatasetVizConfig:
    """Config or path to config used to render viz"""

    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of the stored configuration keys."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance that stores every key of *src_dict* as-is."""
        dataset_viz_config = cls()
        dataset_viz_config.additional_properties = dict(src_dict)
        return dataset_viz_config

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Config or path to config used to render viz
@_attrs_define
class Discussion:
    """
    Attributes:
        id (str):
        name (str):
        description (str):
        entity (Entity):
        type_ (DiscussionType):
        project_id (str):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
        last_message_time (datetime.datetime | None | Unset):
    """

    id: str
    name: str
    description: str
    entity: Entity
    type_: DiscussionType
    project_id: str
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    last_message_time: datetime.datetime | None | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-compatible dict (camelCase keys); lastMessageTime is omitted when UNSET."""
        # last_message_time is a union: UNSET passes through, datetimes become ISO strings.
        last_message_time: None | str | Unset
        if isinstance(self.last_message_time, Unset):
            last_message_time = UNSET
        elif isinstance(self.last_message_time, datetime.datetime):
            last_message_time = self.last_message_time.isoformat()
        else:
            last_message_time = self.last_message_time

        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "entity": self.entity.to_dict(),
                "type": self.type_.value,
                "projectId": self.project_id,
                "createdBy": self.created_by,
                "createdAt": self.created_at.isoformat(),
                "updatedAt": self.updated_at.isoformat(),
            }
        )
        if last_message_time is not UNSET:
            field_dict["lastMessageTime"] = last_message_time

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a Discussion from a JSON-compatible dict; leftover keys go to additional_properties."""
        from ..models.entity import Entity

        d = dict(src_dict)

        def _parse_last_message_time(data: object) -> datetime.datetime | None | Unset:
            # None/UNSET pass through; ISO strings are parsed; anything else is returned as-is.
            if data is None or isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                return isoparse(data)
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(datetime.datetime | None | Unset, data)

        discussion = cls(
            id=d.pop("id"),
            name=d.pop("name"),
            description=d.pop("description"),
            entity=Entity.from_dict(d.pop("entity")),
            type_=DiscussionType(d.pop("type")),
            project_id=d.pop("projectId"),
            created_by=d.pop("createdBy"),
            created_at=isoparse(d.pop("createdAt")),
            updated_at=isoparse(d.pop("updatedAt")),
            last_message_time=_parse_last_message_time(d.pop("lastMessageTime", UNSET)),
        )

        discussion.additional_properties = d
        return discussion

    @property
    def additional_keys(self) -> list[str]:
        """Names of all extra (undeclared) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- description (str):
- entity (Entity):
- type_ (DiscussionType):
- project_id (str):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
- last_message_time (datetime.datetime | None | Unset):
def __init__(self, id, name, description, entity, type_, project_id, created_by,
             created_at, updated_at,
             last_message_time=attr_dict['last_message_time'].default):
    """Initializer generated by attrs for Discussion; stores each field on the instance."""
    self.id = id
    self.name = name
    self.description = description
    self.entity = entity
    self.type_ = type_
    self.project_id = project_id
    self.created_by = created_by
    self.created_at = created_at
    self.updated_at = updated_at
    self.last_message_time = last_message_time
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Discussion.
def to_dict(self) -> dict[str, Any]:
    """Serialize to a JSON-compatible dict (camelCase keys); lastMessageTime is omitted when UNSET."""
    last_message_time: None | str | Unset
    if isinstance(self.last_message_time, Unset):
        last_message_time = UNSET
    elif isinstance(self.last_message_time, datetime.datetime):
        last_message_time = self.last_message_time.isoformat()
    else:
        last_message_time = self.last_message_time

    field_dict: dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "id": self.id,
            "name": self.name,
            "description": self.description,
            "entity": self.entity.to_dict(),
            "type": self.type_.value,
            "projectId": self.project_id,
            "createdBy": self.created_by,
            "createdAt": self.created_at.isoformat(),
            "updatedAt": self.updated_at.isoformat(),
        }
    )
    if last_message_time is not UNSET:
        field_dict["lastMessageTime"] = last_message_time

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a Discussion from a JSON-compatible dict; leftover keys go to additional_properties."""
    from ..models.entity import Entity

    d = dict(src_dict)

    def _parse_last_message_time(data: object) -> datetime.datetime | None | Unset:
        # None/UNSET pass through; ISO strings are parsed; anything else is returned as-is.
        if data is None or isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            return isoparse(data)
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(datetime.datetime | None | Unset, data)

    discussion = cls(
        id=d.pop("id"),
        name=d.pop("name"),
        description=d.pop("description"),
        entity=Entity.from_dict(d.pop("entity")),
        type_=DiscussionType(d.pop("type")),
        project_id=d.pop("projectId"),
        created_by=d.pop("createdBy"),
        created_at=isoparse(d.pop("createdAt")),
        updated_at=isoparse(d.pop("updatedAt")),
        last_message_time=_parse_last_message_time(d.pop("lastMessageTime", UNSET)),
    )

    discussion.additional_properties = d
    return discussion
@_attrs_define
class DiscussionInput:
    """
    Attributes:
        name (str):
        description (str):
        entity (Entity):
        type_ (DiscussionType):
        project_id (str):
    """

    name: str
    description: str
    entity: Entity
    type_: DiscussionType
    project_id: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-compatible dict (camelCase keys)."""
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "name": self.name,
                "description": self.description,
                "entity": self.entity.to_dict(),
                "type": self.type_.value,
                "projectId": self.project_id,
            }
        )
        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a DiscussionInput from a JSON-compatible dict; leftover keys go to additional_properties."""
        from ..models.entity import Entity

        d = dict(src_dict)
        discussion_input = cls(
            name=d.pop("name"),
            description=d.pop("description"),
            entity=Entity.from_dict(d.pop("entity")),
            type_=DiscussionType(d.pop("type")),
            project_id=d.pop("projectId"),
        )
        discussion_input.additional_properties = d
        return discussion_input

    @property
    def additional_keys(self) -> list[str]:
        """Names of all extra (undeclared) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- description (str):
- entity (Entity):
- type_ (DiscussionType):
- project_id (str):
def __init__(self, name, description, entity, type_, project_id):
    """Initializer generated by attrs for DiscussionInput; all fields are required."""
    self.name = name
    self.description = description
    self.entity = entity
    self.type_ = type_
    self.project_id = project_id
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class DiscussionInput.
def to_dict(self) -> dict[str, Any]:
    """Serialize to a JSON-compatible dict (camelCase keys)."""
    field_dict: dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "name": self.name,
            "description": self.description,
            "entity": self.entity.to_dict(),
            "type": self.type_.value,
            "projectId": self.project_id,
        }
    )
    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a DiscussionInput from a JSON-compatible dict; leftover keys go to additional_properties."""
    from ..models.entity import Entity

    d = dict(src_dict)
    discussion_input = cls(
        name=d.pop("name"),
        description=d.pop("description"),
        entity=Entity.from_dict(d.pop("entity")),
        type_=DiscussionType(d.pop("type")),
        project_id=d.pop("projectId"),
    )
    discussion_input.additional_properties = d
    return discussion_input
class DiscussionType(str, Enum):
    """Kind of discussion thread attached to an entity."""

    DISCUSSION = "DISCUSSION"
    NOTES = "NOTES"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Unrecognized values fall back to UNKNOWN instead of raising ValueError.
        # Return the member directly rather than re-entering the Enum call
        # protocol via cls(cls.UNKNOWN).
        return cls.UNKNOWN
str(object='') -> str; str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class Entity:
    """
    Attributes:
        type_ (EntityType):
        id (str):
    """

    type_: EntityType
    id: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a dict with ``type`` and ``id`` keys."""
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "type": self.type_.value,
                "id": self.id,
            }
        )
        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an Entity from a dict; leftover keys go to additional_properties."""
        d = dict(src_dict)
        entity = cls(
            type_=EntityType(d.pop("type")),
            id=d.pop("id"),
        )
        entity.additional_properties = d
        return entity

    @property
    def additional_keys(self) -> list[str]:
        """Names of all extra (undeclared) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- type_ (EntityType):
- id (str):
def __init__(self, type_, id):
    """Initializer generated by attrs for Entity; both fields are required."""
    self.type_ = type_
    self.id = id
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Entity.
class EntityType(str, Enum):
    """Type discriminator for objects a discussion or reference can point at."""

    DATASET = "DATASET"
    DISCUSSION = "DISCUSSION"
    NOTEBOOK = "NOTEBOOK"
    PROCESS = "PROCESS"
    PROJECT = "PROJECT"
    REFERENCE = "REFERENCE"
    SAMPLE = "SAMPLE"
    SHARE = "SHARE"
    TAG = "TAG"
    UNKNOWN = "UNKNOWN"
    USER = "USER"
    WORKSPACE = "WORKSPACE"

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Unrecognized values fall back to UNKNOWN instead of raising ValueError.
        # Return the member directly rather than re-entering the Enum call
        # protocol via cls(cls.UNKNOWN).
        return cls.UNKNOWN
str(object='') -> str; str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
class EnvironmentType(str, Enum):
    """Compute environment a pipeline executes in."""

    AWS_BATCH = "AWS_BATCH"
    AWS_OMICS = "AWS_OMICS"
    LOCAL_AGENT = "LOCAL_AGENT"
    NONE = "NONE"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Unrecognized values fall back to UNKNOWN instead of raising ValueError.
        # Return the member directly rather than re-entering the Enum call
        # protocol via cls(cls.UNKNOWN).
        return cls.UNKNOWN
str(object='') -> str; str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class ErrorMessage:
    """
    Attributes:
        message (str):
    """

    message: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a dict containing ``message`` plus any extra properties."""
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update({"message": self.message})
        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an ErrorMessage from a dict; leftover keys go to additional_properties."""
        d = dict(src_dict)
        error_message = cls(message=d.pop("message"))
        error_message.additional_properties = d
        return error_message

    @property
    def additional_keys(self) -> list[str]:
        """Names of all extra (undeclared) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- message (str):
def __init__(self, message):
    """Initializer generated by attrs for ErrorMessage."""
    self.message = message
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ErrorMessage.
class Executor(str, Enum):
    """Workflow engine that runs a pipeline."""

    CROMWELL = "CROMWELL"
    INGEST = "INGEST"
    NEXTFLOW = "NEXTFLOW"
    OMICS_READY2RUN = "OMICS_READY2RUN"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Unrecognized values fall back to UNKNOWN instead of raising ValueError.
        # Return the member directly rather than re-entering the Enum call
        # protocol via cls(cls.UNKNOWN).
        return cls.UNKNOWN
str(object='') -> str; str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class FeatureFlags:
    """Boolean feature toggles reported by the API for the current deployment.

    Unrecognized keys are preserved in ``additional_properties``.

    Attributes:
        sftp_enabled (bool):
        governance_enabled (bool):
        project_requests_enabled (bool):
        workspaces_enabled (bool):
        drive_enabled (bool):
    """

    sftp_enabled: bool
    governance_enabled: bool
    project_requests_enabled: bool
    workspaces_enabled: bool
    drive_enabled: bool
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a dict; known fields override any clashing extra keys."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["sftpEnabled"] = self.sftp_enabled
        serialized["governanceEnabled"] = self.governance_enabled
        serialized["projectRequestsEnabled"] = self.project_requests_enabled
        serialized["workspacesEnabled"] = self.workspaces_enabled
        serialized["driveEnabled"] = self.drive_enabled
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; leftover keys land in additional_properties."""
        data = dict(src_dict)
        instance = cls(
            sftp_enabled=data.pop("sftpEnabled"),
            governance_enabled=data.pop("governanceEnabled"),
            project_requests_enabled=data.pop("projectRequestsEnabled"),
            workspaces_enabled=data.pop("workspacesEnabled"),
            drive_enabled=data.pop("driveEnabled"),
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of extra keys not modeled as attributes."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- sftp_enabled (bool):
- governance_enabled (bool):
- project_requests_enabled (bool):
- workspaces_enabled (bool):
- drive_enabled (bool):
28def __init__(self, sftp_enabled, governance_enabled, project_requests_enabled, workspaces_enabled, drive_enabled): 29 self.sftp_enabled = sftp_enabled 30 self.governance_enabled = governance_enabled 31 self.project_requests_enabled = project_requests_enabled 32 self.workspaces_enabled = workspaces_enabled 33 self.drive_enabled = drive_enabled 34 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FeatureFlags.
31 def to_dict(self) -> dict[str, Any]: 32 sftp_enabled = self.sftp_enabled 33 34 governance_enabled = self.governance_enabled 35 36 project_requests_enabled = self.project_requests_enabled 37 38 workspaces_enabled = self.workspaces_enabled 39 40 drive_enabled = self.drive_enabled 41 42 field_dict: dict[str, Any] = {} 43 field_dict.update(self.additional_properties) 44 field_dict.update( 45 { 46 "sftpEnabled": sftp_enabled, 47 "governanceEnabled": governance_enabled, 48 "projectRequestsEnabled": project_requests_enabled, 49 "workspacesEnabled": workspaces_enabled, 50 "driveEnabled": drive_enabled, 51 } 52 ) 53 54 return field_dict
56 @classmethod 57 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 58 d = dict(src_dict) 59 sftp_enabled = d.pop("sftpEnabled") 60 61 governance_enabled = d.pop("governanceEnabled") 62 63 project_requests_enabled = d.pop("projectRequestsEnabled") 64 65 workspaces_enabled = d.pop("workspacesEnabled") 66 67 drive_enabled = d.pop("driveEnabled") 68 69 feature_flags = cls( 70 sftp_enabled=sftp_enabled, 71 governance_enabled=governance_enabled, 72 project_requests_enabled=project_requests_enabled, 73 workspaces_enabled=workspaces_enabled, 74 drive_enabled=drive_enabled, 75 ) 76 77 feature_flags.additional_properties = d 78 return feature_flags
@_attrs_define
class FileEntry:
    """One file in a dataset listing, with optional size and metadata.

    Fields left as ``UNSET`` are omitted from the serialized form.

    Attributes:
        path (str | Unset): Relative path to file Example: data/fastq/SRX12875516_SRR16674827_1.fastq.gz.
        size (int | Unset): File size (in bytes) Example: 1435658507.
        metadata (FileEntryMetadata | Unset): Metadata associated with the file Example: {'read': 1}.
    """

    path: str | Unset = UNSET
    size: int | Unset = UNSET
    metadata: FileEntryMetadata | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize, skipping any field still set to UNSET."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        if self.path is not UNSET:
            serialized["path"] = self.path
        if self.size is not UNSET:
            serialized["size"] = self.size
        if not isinstance(self.metadata, Unset):
            # Nested model serializes recursively.
            serialized["metadata"] = self.metadata.to_dict()
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize; absent keys stay UNSET, leftover keys become additional properties."""
        from ..models.file_entry_metadata import FileEntryMetadata

        data = dict(src_dict)

        raw_metadata = data.pop("metadata", UNSET)
        metadata: FileEntryMetadata | Unset
        if isinstance(raw_metadata, Unset):
            metadata = UNSET
        else:
            metadata = FileEntryMetadata.from_dict(raw_metadata)

        instance = cls(
            path=data.pop("path", UNSET),
            size=data.pop("size", UNSET),
            metadata=metadata,
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of extra keys not modeled as attributes."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- path (str | Unset): Relative path to file Example: data/fastq/SRX12875516_SRR16674827_1.fastq.gz.
- size (int | Unset): File size (in bytes) Example: 1435658507.
- metadata (FileEntryMetadata | Unset): Metadata associated with the file Example: {'read': 1}.
26def __init__(self, path=attr_dict['path'].default, size=attr_dict['size'].default, metadata=attr_dict['metadata'].default): 27 self.path = path 28 self.size = size 29 self.metadata = metadata 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FileEntry.
33 def to_dict(self) -> dict[str, Any]: 34 path = self.path 35 36 size = self.size 37 38 metadata: dict[str, Any] | Unset = UNSET 39 if not isinstance(self.metadata, Unset): 40 metadata = self.metadata.to_dict() 41 42 field_dict: dict[str, Any] = {} 43 field_dict.update(self.additional_properties) 44 field_dict.update({}) 45 if path is not UNSET: 46 field_dict["path"] = path 47 if size is not UNSET: 48 field_dict["size"] = size 49 if metadata is not UNSET: 50 field_dict["metadata"] = metadata 51 52 return field_dict
54 @classmethod 55 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 56 from ..models.file_entry_metadata import FileEntryMetadata 57 58 d = dict(src_dict) 59 path = d.pop("path", UNSET) 60 61 size = d.pop("size", UNSET) 62 63 _metadata = d.pop("metadata", UNSET) 64 metadata: FileEntryMetadata | Unset 65 if isinstance(_metadata, Unset): 66 metadata = UNSET 67 else: 68 metadata = FileEntryMetadata.from_dict(_metadata) 69 70 file_entry = cls( 71 path=path, 72 size=size, 73 metadata=metadata, 74 ) 75 76 file_entry.additional_properties = d 77 return file_entry
@_attrs_define
class FileEntryMetadata:
    """Metadata associated with the file

    Example:
        {'read': 1}

    """

    # Free-form string-to-string mapping; there are no modeled fields.
    additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize as a shallow copy of the stored mapping."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Wrap the given mapping; every key is stored as an additional property."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored keys."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> str:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: str) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Metadata associated with the file
Example:
{'read': 1}
@_attrs_define
class FileMappingRule:
    """Criteria describing one expected group of files in a dataset.

    Attributes:
        description (str): Describes the group of possible files that meet a single file type criteria.
        file_name_patterns (list[FileNamePattern]): Describes the possible file patterns to expect for the file type
            group.
        min_ (int | Unset): Minimum number of files to expect for the file type group.
        max_ (int | Unset): Maximum number of files to expect for the file type group.
        is_sample (bool | Unset): Specifies if the file type will be associated with a sample.
    """

    description: str
    file_name_patterns: list[FileNamePattern]
    min_: int | Unset = UNSET
    max_: int | Unset = UNSET
    is_sample: bool | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize; optional fields are omitted while UNSET."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["description"] = self.description
        serialized["fileNamePatterns"] = [pattern.to_dict() for pattern in self.file_name_patterns]
        if self.min_ is not UNSET:
            serialized["min"] = self.min_
        if self.max_ is not UNSET:
            serialized["max"] = self.max_
        if self.is_sample is not UNSET:
            serialized["isSample"] = self.is_sample
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize; nested patterns are parsed recursively, extras retained."""
        from ..models.file_name_pattern import FileNamePattern

        data = dict(src_dict)
        patterns = [FileNamePattern.from_dict(item) for item in data.pop("fileNamePatterns")]

        instance = cls(
            description=data.pop("description"),
            file_name_patterns=patterns,
            min_=data.pop("min", UNSET),
            max_=data.pop("max", UNSET),
            is_sample=data.pop("isSample", UNSET),
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of extra keys not modeled as attributes."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- description (str): Describes the group of possible files that meet a single file type criteria.
- file_name_patterns (list[FileNamePattern]): Describes the possible file patterns to expect for the file type group.
- min_ (int | Unset): Minimum number of files to expect for the file type group.
- max_ (int | Unset): Maximum number of files to expect for the file type group.
- is_sample (bool | Unset): Specifies if the file type will be associated with a sample.
28def __init__(self, description, file_name_patterns, min_=attr_dict['min_'].default, max_=attr_dict['max_'].default, is_sample=attr_dict['is_sample'].default): 29 self.description = description 30 self.file_name_patterns = file_name_patterns 31 self.min_ = min_ 32 self.max_ = max_ 33 self.is_sample = is_sample 34 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FileMappingRule.
38 def to_dict(self) -> dict[str, Any]: 39 description = self.description 40 41 file_name_patterns = [] 42 for file_name_patterns_item_data in self.file_name_patterns: 43 file_name_patterns_item = file_name_patterns_item_data.to_dict() 44 file_name_patterns.append(file_name_patterns_item) 45 46 min_ = self.min_ 47 48 max_ = self.max_ 49 50 is_sample = self.is_sample 51 52 field_dict: dict[str, Any] = {} 53 field_dict.update(self.additional_properties) 54 field_dict.update( 55 { 56 "description": description, 57 "fileNamePatterns": file_name_patterns, 58 } 59 ) 60 if min_ is not UNSET: 61 field_dict["min"] = min_ 62 if max_ is not UNSET: 63 field_dict["max"] = max_ 64 if is_sample is not UNSET: 65 field_dict["isSample"] = is_sample 66 67 return field_dict
69 @classmethod 70 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 71 from ..models.file_name_pattern import FileNamePattern 72 73 d = dict(src_dict) 74 description = d.pop("description") 75 76 file_name_patterns = [] 77 _file_name_patterns = d.pop("fileNamePatterns") 78 for file_name_patterns_item_data in _file_name_patterns: 79 file_name_patterns_item = FileNamePattern.from_dict(file_name_patterns_item_data) 80 81 file_name_patterns.append(file_name_patterns_item) 82 83 min_ = d.pop("min", UNSET) 84 85 max_ = d.pop("max", UNSET) 86 87 is_sample = d.pop("isSample", UNSET) 88 89 file_mapping_rule = cls( 90 description=description, 91 file_name_patterns=file_name_patterns, 92 min_=min_, 93 max_=max_, 94 is_sample=is_sample, 95 ) 96 97 file_mapping_rule.additional_properties = d 98 return file_mapping_rule
@_attrs_define
class FileNameMatch:
    """Result of matching a file name against a sample-name regex.

    Attributes:
        file_name (str):
        sample_name (str):
        regex_pattern_match (str):
    """

    file_name: str
    sample_name: str
    regex_pattern_match: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a dict; known fields override any clashing extra keys."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["fileName"] = self.file_name
        serialized["sampleName"] = self.sample_name
        serialized["regexPatternMatch"] = self.regex_pattern_match
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; leftover keys become additional properties."""
        data = dict(src_dict)
        instance = cls(
            file_name=data.pop("fileName"),
            sample_name=data.pop("sampleName"),
            regex_pattern_match=data.pop("regexPatternMatch"),
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of extra keys not modeled as attributes."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- file_name (str):
- sample_name (str):
- regex_pattern_match (str):
26def __init__(self, file_name, sample_name, regex_pattern_match): 27 self.file_name = file_name 28 self.sample_name = sample_name 29 self.regex_pattern_match = regex_pattern_match 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FileNameMatch.
27 def to_dict(self) -> dict[str, Any]: 28 file_name = self.file_name 29 30 sample_name = self.sample_name 31 32 regex_pattern_match = self.regex_pattern_match 33 34 field_dict: dict[str, Any] = {} 35 field_dict.update(self.additional_properties) 36 field_dict.update( 37 { 38 "fileName": file_name, 39 "sampleName": sample_name, 40 "regexPatternMatch": regex_pattern_match, 41 } 42 ) 43 44 return field_dict
46 @classmethod 47 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 48 d = dict(src_dict) 49 file_name = d.pop("fileName") 50 51 sample_name = d.pop("sampleName") 52 53 regex_pattern_match = d.pop("regexPatternMatch") 54 55 file_name_match = cls( 56 file_name=file_name, 57 sample_name=sample_name, 58 regex_pattern_match=regex_pattern_match, 59 ) 60 61 file_name_match.additional_properties = d 62 return file_name_match
@_attrs_define
class FileNamePattern:
    """A named file-name regex used to extract sample names.

    Attributes:
        example_name (str): User-readable name for the file type used for display.
        sample_matching_pattern (str): File name pattern, formatted as a valid regex, to extract sample name and other
            metadata.
        description (None | str | Unset): File description.
    """

    example_name: str
    sample_matching_pattern: str
    description: None | str | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize; ``description`` is emitted (even when None) unless UNSET."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["exampleName"] = self.example_name
        serialized["sampleMatchingPattern"] = self.sample_matching_pattern
        if not isinstance(self.description, Unset):
            serialized["description"] = self.description
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize; absent description stays UNSET, extras are retained."""
        data = dict(src_dict)
        # None and UNSET pass through unchanged; the cast is purely for typing.
        description = cast(None | str | Unset, data.pop("description", UNSET))

        instance = cls(
            example_name=data.pop("exampleName"),
            sample_matching_pattern=data.pop("sampleMatchingPattern"),
            description=description,
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of extra keys not modeled as attributes."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- example_name (str): User-readable name for the file type used for display.
- sample_matching_pattern (str): File name pattern, formatted as a valid regex, to extract sample name and other metadata.
- description (None | str | Unset): File description.
26def __init__(self, example_name, sample_matching_pattern, description=attr_dict['description'].default): 27 self.example_name = example_name 28 self.sample_matching_pattern = sample_matching_pattern 29 self.description = description 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FileNamePattern.
30 def to_dict(self) -> dict[str, Any]: 31 example_name = self.example_name 32 33 sample_matching_pattern = self.sample_matching_pattern 34 35 description: None | str | Unset 36 if isinstance(self.description, Unset): 37 description = UNSET 38 else: 39 description = self.description 40 41 field_dict: dict[str, Any] = {} 42 field_dict.update(self.additional_properties) 43 field_dict.update( 44 { 45 "exampleName": example_name, 46 "sampleMatchingPattern": sample_matching_pattern, 47 } 48 ) 49 if description is not UNSET: 50 field_dict["description"] = description 51 52 return field_dict
54 @classmethod 55 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 56 d = dict(src_dict) 57 example_name = d.pop("exampleName") 58 59 sample_matching_pattern = d.pop("sampleMatchingPattern") 60 61 def _parse_description(data: object) -> None | str | Unset: 62 if data is None: 63 return data 64 if isinstance(data, Unset): 65 return data 66 return cast(None | str | Unset, data) 67 68 description = _parse_description(d.pop("description", UNSET)) 69 70 file_name_pattern = cls( 71 example_name=example_name, 72 sample_matching_pattern=sample_matching_pattern, 73 description=description, 74 ) 75 76 file_name_pattern.additional_properties = d 77 return file_name_pattern
@_attrs_define
class FileRequirements:
    """Files required by a pipeline, with the data types they may come from.

    Attributes:
        files (list[str]):
        error_msg (str):
        allowed_data_types (list[AllowedDataType]):
    """

    files: list[str]
    error_msg: str
    allowed_data_types: list[AllowedDataType]
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a dict; known fields override any clashing extra keys."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["files"] = self.files
        serialized["errorMsg"] = self.error_msg
        serialized["allowedDataTypes"] = [item.to_dict() for item in self.allowed_data_types]
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize; nested data types are parsed recursively, extras retained."""
        from ..models.allowed_data_type import AllowedDataType

        data = dict(src_dict)
        data_types = [AllowedDataType.from_dict(item) for item in data.pop("allowedDataTypes")]

        instance = cls(
            files=cast(list[str], data.pop("files")),
            error_msg=data.pop("errorMsg"),
            allowed_data_types=data_types,
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of extra keys not modeled as attributes."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- files (list[str]):
- error_msg (str):
- allowed_data_types (list[AllowedDataType]):
26def __init__(self, files, error_msg, allowed_data_types): 27 self.files = files 28 self.error_msg = error_msg 29 self.allowed_data_types = allowed_data_types 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FileRequirements.
31 def to_dict(self) -> dict[str, Any]: 32 files = self.files 33 34 error_msg = self.error_msg 35 36 allowed_data_types = [] 37 for allowed_data_types_item_data in self.allowed_data_types: 38 allowed_data_types_item = allowed_data_types_item_data.to_dict() 39 allowed_data_types.append(allowed_data_types_item) 40 41 field_dict: dict[str, Any] = {} 42 field_dict.update(self.additional_properties) 43 field_dict.update( 44 { 45 "files": files, 46 "errorMsg": error_msg, 47 "allowedDataTypes": allowed_data_types, 48 } 49 ) 50 51 return field_dict
53 @classmethod 54 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 55 from ..models.allowed_data_type import AllowedDataType 56 57 d = dict(src_dict) 58 files = cast(list[str], d.pop("files")) 59 60 error_msg = d.pop("errorMsg") 61 62 allowed_data_types = [] 63 _allowed_data_types = d.pop("allowedDataTypes") 64 for allowed_data_types_item_data in _allowed_data_types: 65 allowed_data_types_item = AllowedDataType.from_dict(allowed_data_types_item_data) 66 67 allowed_data_types.append(allowed_data_types_item) 68 69 file_requirements = cls( 70 files=files, 71 error_msg=error_msg, 72 allowed_data_types=allowed_data_types, 73 ) 74 75 file_requirements.additional_properties = d 76 return file_requirements
@_attrs_define
class FormSchema:
    """Parameter form definition: a JSONSchema body plus optional rjsf UI hints.

    Attributes:
        form (FormSchemaForm | Unset): JSONSchema representation of the parameters
        ui (FormSchemaUi | Unset): Describes how the form should be rendered, see rjsf
    """

    form: FormSchemaForm | Unset = UNSET
    ui: FormSchemaUi | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize; nested models are expanded, UNSET fields are omitted."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        if not isinstance(self.form, Unset):
            serialized["form"] = self.form.to_dict()
        if not isinstance(self.ui, Unset):
            serialized["ui"] = self.ui.to_dict()
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize; absent keys stay UNSET, extras are retained."""
        from ..models.form_schema_form import FormSchemaForm
        from ..models.form_schema_ui import FormSchemaUi

        data = dict(src_dict)

        raw_form = data.pop("form", UNSET)
        form: FormSchemaForm | Unset
        form = UNSET if isinstance(raw_form, Unset) else FormSchemaForm.from_dict(raw_form)

        raw_ui = data.pop("ui", UNSET)
        ui: FormSchemaUi | Unset
        ui = UNSET if isinstance(raw_ui, Unset) else FormSchemaUi.from_dict(raw_ui)

        instance = cls(
            form=form,
            ui=ui,
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of extra keys not modeled as attributes."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- form (FormSchemaForm | Unset): JSONSchema representation of the parameters
- ui (FormSchemaUi | Unset): Describes how the form should be rendered, see rjsf
25def __init__(self, form=attr_dict['form'].default, ui=attr_dict['ui'].default): 26 self.form = form 27 self.ui = ui 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FormSchema.
32 def to_dict(self) -> dict[str, Any]: 33 form: dict[str, Any] | Unset = UNSET 34 if not isinstance(self.form, Unset): 35 form = self.form.to_dict() 36 37 ui: dict[str, Any] | Unset = UNSET 38 if not isinstance(self.ui, Unset): 39 ui = self.ui.to_dict() 40 41 field_dict: dict[str, Any] = {} 42 field_dict.update(self.additional_properties) 43 field_dict.update({}) 44 if form is not UNSET: 45 field_dict["form"] = form 46 if ui is not UNSET: 47 field_dict["ui"] = ui 48 49 return field_dict
51 @classmethod 52 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 53 from ..models.form_schema_form import FormSchemaForm 54 from ..models.form_schema_ui import FormSchemaUi 55 56 d = dict(src_dict) 57 _form = d.pop("form", UNSET) 58 form: FormSchemaForm | Unset 59 if isinstance(_form, Unset): 60 form = UNSET 61 else: 62 form = FormSchemaForm.from_dict(_form) 63 64 _ui = d.pop("ui", UNSET) 65 ui: FormSchemaUi | Unset 66 if isinstance(_ui, Unset): 67 ui = UNSET 68 else: 69 ui = FormSchemaUi.from_dict(_ui) 70 71 form_schema = cls( 72 form=form, 73 ui=ui, 74 ) 75 76 form_schema.additional_properties = d 77 return form_schema
@_attrs_define
class FormSchemaForm:
    """JSONSchema representation of the parameters"""

    # Free-form mapping; the schema document itself has no modeled fields.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize as a shallow copy of the stored mapping."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Wrap the given mapping; every key is stored as an additional property."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored keys."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
JSONSchema representation of the parameters
@_attrs_define
class FormSchemaUi:
    """Describes how the form should be rendered, see rjsf"""

    # Free-form mapping; rjsf UI hints have no modeled fields.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize as a shallow copy of the stored mapping."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Wrap the given mapping; every key is stored as an additional property."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored keys."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Describes how the form should be rendered, see rjsf
@_attrs_define
class FulfillmentResponse:
    """Response identifying a fulfillment and where its output lives.

    Attributes:
        fulfillment_id (str):
        path (str):
    """

    fulfillment_id: str
    path: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a dict; known fields override any clashing extra keys."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["fulfillmentId"] = self.fulfillment_id
        serialized["path"] = self.path
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; leftover keys become additional properties."""
        data = dict(src_dict)
        instance = cls(
            fulfillment_id=data.pop("fulfillmentId"),
            path=data.pop("path"),
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of extra keys not modeled as attributes."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- fulfillment_id (str):
- path (str):
25def __init__(self, fulfillment_id, path): 26 self.fulfillment_id = fulfillment_id 27 self.path = path 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FulfillmentResponse.
25 def to_dict(self) -> dict[str, Any]: 26 fulfillment_id = self.fulfillment_id 27 28 path = self.path 29 30 field_dict: dict[str, Any] = {} 31 field_dict.update(self.additional_properties) 32 field_dict.update( 33 { 34 "fulfillmentId": fulfillment_id, 35 "path": path, 36 } 37 ) 38 39 return field_dict
41 @classmethod 42 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 43 d = dict(src_dict) 44 fulfillment_id = d.pop("fulfillmentId") 45 46 path = d.pop("path") 47 48 fulfillment_response = cls( 49 fulfillment_id=fulfillment_id, 50 path=path, 51 ) 52 53 fulfillment_response.additional_properties = d 54 return fulfillment_response
@_attrs_define
class GenerateSftpCredentialsRequest:
    """
    Attributes:
        lifetime_days (int | Unset): Number of days the credentials are valid for. Default: 1.
    """

    lifetime_days: int | Unset = 1
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a dict; lifetimeDays is omitted when unset."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        if self.lifetime_days is not UNSET:
            serialized["lifetimeDays"] = self.lifetime_days
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a mapping; leftover keys become additional properties."""
        data = dict(src_dict)
        instance = cls(lifetime_days=data.pop("lifetimeDays", UNSET))
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored additional properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- lifetime_days (int | Unset): Number of days the credentials are valid for. Default: 1.
24def __init__(self, lifetime_days=attr_dict['lifetime_days'].default): 25 self.lifetime_days = lifetime_days 26 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GenerateSftpCredentialsRequest.
36 @classmethod 37 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 38 d = dict(src_dict) 39 lifetime_days = d.pop("lifetimeDays", UNSET) 40 41 generate_sftp_credentials_request = cls( 42 lifetime_days=lifetime_days, 43 ) 44 45 generate_sftp_credentials_request.additional_properties = d 46 return generate_sftp_credentials_request
@_attrs_define
class GetExecutionLogsResponse:
    """
    Attributes:
        events (list[LogEntry]):
    """

    events: list[LogEntry]
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a dict; each event is serialized via its own to_dict."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["events"] = [entry.to_dict() for entry in self.events]
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a mapping; each event item is parsed as a LogEntry."""
        # Imported locally to avoid a circular import at module load time.
        from ..models.log_entry import LogEntry

        data = dict(src_dict)
        parsed_events = [LogEntry.from_dict(item) for item in data.pop("events")]
        instance = cls(events=parsed_events)
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored additional properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- events (list[LogEntry]):
24def __init__(self, events): 25 self.events = events 26 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GetExecutionLogsResponse.
27 def to_dict(self) -> dict[str, Any]: 28 events = [] 29 for events_item_data in self.events: 30 events_item = events_item_data.to_dict() 31 events.append(events_item) 32 33 field_dict: dict[str, Any] = {} 34 field_dict.update(self.additional_properties) 35 field_dict.update( 36 { 37 "events": events, 38 } 39 ) 40 41 return field_dict
43 @classmethod 44 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 45 from ..models.log_entry import LogEntry 46 47 d = dict(src_dict) 48 events = [] 49 _events = d.pop("events") 50 for events_item_data in _events: 51 events_item = LogEntry.from_dict(events_item_data) 52 53 events.append(events_item) 54 55 get_execution_logs_response = cls( 56 events=events, 57 ) 58 59 get_execution_logs_response.additional_properties = d 60 return get_execution_logs_response
@_attrs_define
class GetProjectSummaryResponse200:
    """ """

    additional_properties: dict[str, list[Task]] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a dict mapping each property name to its list of serialized tasks."""
        return {
            prop_name: [task.to_dict() for task in tasks]
            for prop_name, tasks in self.additional_properties.items()
        }

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance whose properties are lists of Task parsed from the mapping."""
        # Imported locally to avoid a circular import at module load time.
        from ..models.task import Task

        instance = cls()
        instance.additional_properties = {
            prop_name: [Task.from_dict(item) for item in items]
            for prop_name, items in dict(src_dict).items()
        }
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored additional properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> list[Task]:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: list[Task]) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
23 def to_dict(self) -> dict[str, Any]: 24 field_dict: dict[str, Any] = {} 25 for prop_name, prop in self.additional_properties.items(): 26 field_dict[prop_name] = [] 27 for additional_property_item_data in prop: 28 additional_property_item = additional_property_item_data.to_dict() 29 field_dict[prop_name].append(additional_property_item) 30 31 return field_dict
33 @classmethod 34 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 35 from ..models.task import Task 36 37 d = dict(src_dict) 38 get_project_summary_response_200 = cls() 39 40 additional_properties = {} 41 for prop_name, prop_dict in d.items(): 42 additional_property = [] 43 _additional_property = prop_dict 44 for additional_property_item_data in _additional_property: 45 additional_property_item = Task.from_dict(additional_property_item_data) 46 47 additional_property.append(additional_property_item) 48 49 additional_properties[prop_name] = additional_property 50 51 get_project_summary_response_200.additional_properties = additional_properties 52 return get_project_summary_response_200
class GovernanceAccessType(str, Enum):
    """Wire values for the kind of governance/fulfillment file access being requested."""

    FULFILLMENT_DOWNLOAD = "FULFILLMENT_DOWNLOAD"
    FULFILLMENT_UPLOAD = "FULFILLMENT_UPLOAD"
    GOVERNANCE_DOWNLOAD = "GOVERNANCE_DOWNLOAD"
    GOVERNANCE_UPLOAD = "GOVERNANCE_UPLOAD"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Render as the plain wire value rather than "GovernanceAccessType.X".
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Map unrecognized wire values to UNKNOWN instead of raising ValueError.
        # Returning the member directly avoids the redundant re-entrant lookup
        # that cls(cls.UNKNOWN) performed.
        return cls.UNKNOWN
str(object='') -> str
str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class GovernanceClassification:
    """
    Attributes:
        id (str):
        name (str):
        description (str):
        requirement_ids (list[str]):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
    """

    id: str
    name: str
    description: str
    requirement_ids: list[str]
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize using camelCase wire names; datetimes become ISO-8601 strings."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "requirementIds": self.requirement_ids,
                "createdBy": self.created_by,
                "createdAt": self.created_at.isoformat(),
                "updatedAt": self.updated_at.isoformat(),
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a mapping; leftover keys become additional properties."""
        data = dict(src_dict)
        instance = cls(
            id=data.pop("id"),
            name=data.pop("name"),
            description=data.pop("description"),
            requirement_ids=cast(list[str], data.pop("requirementIds")),
            created_by=data.pop("createdBy"),
            created_at=isoparse(data.pop("createdAt")),
            updated_at=isoparse(data.pop("updatedAt")),
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored additional properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- description (str):
- requirement_ids (list[str]):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
30def __init__(self, id, name, description, requirement_ids, created_by, created_at, updated_at): 31 self.id = id 32 self.name = name 33 self.description = description 34 self.requirement_ids = requirement_ids 35 self.created_by = created_by 36 self.created_at = created_at 37 self.updated_at = updated_at 38 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GovernanceClassification.
37 def to_dict(self) -> dict[str, Any]: 38 id = self.id 39 40 name = self.name 41 42 description = self.description 43 44 requirement_ids = self.requirement_ids 45 46 created_by = self.created_by 47 48 created_at = self.created_at.isoformat() 49 50 updated_at = self.updated_at.isoformat() 51 52 field_dict: dict[str, Any] = {} 53 field_dict.update(self.additional_properties) 54 field_dict.update( 55 { 56 "id": id, 57 "name": name, 58 "description": description, 59 "requirementIds": requirement_ids, 60 "createdBy": created_by, 61 "createdAt": created_at, 62 "updatedAt": updated_at, 63 } 64 ) 65 66 return field_dict
68 @classmethod 69 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 70 d = dict(src_dict) 71 id = d.pop("id") 72 73 name = d.pop("name") 74 75 description = d.pop("description") 76 77 requirement_ids = cast(list[str], d.pop("requirementIds")) 78 79 created_by = d.pop("createdBy") 80 81 created_at = isoparse(d.pop("createdAt")) 82 83 updated_at = isoparse(d.pop("updatedAt")) 84 85 governance_classification = cls( 86 id=id, 87 name=name, 88 description=description, 89 requirement_ids=requirement_ids, 90 created_by=created_by, 91 created_at=created_at, 92 updated_at=updated_at, 93 ) 94 95 governance_classification.additional_properties = d 96 return governance_classification
@_attrs_define
class GovernanceContact:
    """
    Attributes:
        id (str):
        title (str):
        description (str):
        name (str):
        phone (str):
        email (str):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
    """

    id: str
    title: str
    description: str
    name: str
    phone: str
    email: str
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize using camelCase wire names; datetimes become ISO-8601 strings."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "id": self.id,
                "title": self.title,
                "description": self.description,
                "name": self.name,
                "phone": self.phone,
                "email": self.email,
                "createdBy": self.created_by,
                "createdAt": self.created_at.isoformat(),
                "updatedAt": self.updated_at.isoformat(),
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a mapping; leftover keys become additional properties."""
        data = dict(src_dict)
        instance = cls(
            id=data.pop("id"),
            title=data.pop("title"),
            description=data.pop("description"),
            name=data.pop("name"),
            phone=data.pop("phone"),
            email=data.pop("email"),
            created_by=data.pop("createdBy"),
            created_at=isoparse(data.pop("createdAt")),
            updated_at=isoparse(data.pop("updatedAt")),
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored additional properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- title (str):
- description (str):
- name (str):
- phone (str):
- email (str):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
32def __init__(self, id, title, description, name, phone, email, created_by, created_at, updated_at): 33 self.id = id 34 self.title = title 35 self.description = description 36 self.name = name 37 self.phone = phone 38 self.email = email 39 self.created_by = created_by 40 self.created_at = created_at 41 self.updated_at = updated_at 42 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GovernanceContact.
41 def to_dict(self) -> dict[str, Any]: 42 id = self.id 43 44 title = self.title 45 46 description = self.description 47 48 name = self.name 49 50 phone = self.phone 51 52 email = self.email 53 54 created_by = self.created_by 55 56 created_at = self.created_at.isoformat() 57 58 updated_at = self.updated_at.isoformat() 59 60 field_dict: dict[str, Any] = {} 61 field_dict.update(self.additional_properties) 62 field_dict.update( 63 { 64 "id": id, 65 "title": title, 66 "description": description, 67 "name": name, 68 "phone": phone, 69 "email": email, 70 "createdBy": created_by, 71 "createdAt": created_at, 72 "updatedAt": updated_at, 73 } 74 ) 75 76 return field_dict
78 @classmethod 79 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 80 d = dict(src_dict) 81 id = d.pop("id") 82 83 title = d.pop("title") 84 85 description = d.pop("description") 86 87 name = d.pop("name") 88 89 phone = d.pop("phone") 90 91 email = d.pop("email") 92 93 created_by = d.pop("createdBy") 94 95 created_at = isoparse(d.pop("createdAt")) 96 97 updated_at = isoparse(d.pop("updatedAt")) 98 99 governance_contact = cls( 100 id=id, 101 title=title, 102 description=description, 103 name=name, 104 phone=phone, 105 email=email, 106 created_by=created_by, 107 created_at=created_at, 108 updated_at=updated_at, 109 ) 110 111 governance_contact.additional_properties = d 112 return governance_contact
@_attrs_define
class GovernanceExpiry:
    """
    Attributes:
        type_ (GovernanceExpiryType | Unset): The expiry conditions that can be applied to governance requirements.
        days (int | None | Unset): The number of days for a relative expiration
        date (datetime.datetime | None | Unset): The date for an absolute expiration
    """

    type_: GovernanceExpiryType | Unset = UNSET
    days: int | None | Unset = UNSET
    date: datetime.datetime | None | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        # Serialize the enum to its wire value only when it was explicitly set.
        type_: str | Unset = UNSET
        if not isinstance(self.type_, Unset):
            type_ = self.type_.value

        days: int | None | Unset
        if isinstance(self.days, Unset):
            days = UNSET
        else:
            days = self.days

        # A set datetime becomes an ISO-8601 string; an explicit None stays None.
        date: None | str | Unset
        if isinstance(self.date, Unset):
            date = UNSET
        elif isinstance(self.date, datetime.datetime):
            date = self.date.isoformat()
        else:
            date = self.date

        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update({})
        # UNSET fields are omitted from the payload entirely; None is serialized.
        if type_ is not UNSET:
            field_dict["type"] = type_
        if days is not UNSET:
            field_dict["days"] = days
        if date is not UNSET:
            field_dict["date"] = date

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        d = dict(src_dict)
        # Missing "type" key stays UNSET; present values go through the enum
        # (which has its own UNKNOWN fallback for unrecognized wire values).
        _type_ = d.pop("type", UNSET)
        type_: GovernanceExpiryType | Unset
        if isinstance(_type_, Unset):
            type_ = UNSET
        else:
            type_ = GovernanceExpiryType(_type_)

        def _parse_days(data: object) -> int | None | Unset:
            # None and UNSET pass through unchanged; anything else is assumed int.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(int | None | Unset, data)

        days = _parse_days(d.pop("days", UNSET))

        def _parse_date(data: object) -> datetime.datetime | None | Unset:
            # Strings are parsed as ISO-8601 datetimes; on any parse failure the
            # raw value is returned unchanged (best-effort, never raises here).
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                date_type_0 = isoparse(data)

                return date_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(datetime.datetime | None | Unset, data)

        date = _parse_date(d.pop("date", UNSET))

        governance_expiry = cls(
            type_=type_,
            days=days,
            date=date,
        )

        # Remaining keys are preserved as additional properties.
        governance_expiry.additional_properties = d
        return governance_expiry

    @property
    def additional_keys(self) -> list[str]:
        # Names of all stored additional properties.
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- type_ (GovernanceExpiryType | Unset): The expiry conditions that can be applied to governance requirements.
- days (int | None | Unset): The number of days for a relative expiration
- date (datetime.datetime | None | Unset): The date for an absolute expiration
26def __init__(self, type_=attr_dict['type_'].default, days=attr_dict['days'].default, date=attr_dict['date'].default): 27 self.type_ = type_ 28 self.days = days 29 self.date = date 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GovernanceExpiry.
32 def to_dict(self) -> dict[str, Any]: 33 type_: str | Unset = UNSET 34 if not isinstance(self.type_, Unset): 35 type_ = self.type_.value 36 37 days: int | None | Unset 38 if isinstance(self.days, Unset): 39 days = UNSET 40 else: 41 days = self.days 42 43 date: None | str | Unset 44 if isinstance(self.date, Unset): 45 date = UNSET 46 elif isinstance(self.date, datetime.datetime): 47 date = self.date.isoformat() 48 else: 49 date = self.date 50 51 field_dict: dict[str, Any] = {} 52 field_dict.update(self.additional_properties) 53 field_dict.update({}) 54 if type_ is not UNSET: 55 field_dict["type"] = type_ 56 if days is not UNSET: 57 field_dict["days"] = days 58 if date is not UNSET: 59 field_dict["date"] = date 60 61 return field_dict
63 @classmethod 64 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 65 d = dict(src_dict) 66 _type_ = d.pop("type", UNSET) 67 type_: GovernanceExpiryType | Unset 68 if isinstance(_type_, Unset): 69 type_ = UNSET 70 else: 71 type_ = GovernanceExpiryType(_type_) 72 73 def _parse_days(data: object) -> int | None | Unset: 74 if data is None: 75 return data 76 if isinstance(data, Unset): 77 return data 78 return cast(int | None | Unset, data) 79 80 days = _parse_days(d.pop("days", UNSET)) 81 82 def _parse_date(data: object) -> datetime.datetime | None | Unset: 83 if data is None: 84 return data 85 if isinstance(data, Unset): 86 return data 87 try: 88 if not isinstance(data, str): 89 raise TypeError() 90 date_type_0 = isoparse(data) 91 92 return date_type_0 93 except (TypeError, ValueError, AttributeError, KeyError): 94 pass 95 return cast(datetime.datetime | None | Unset, data) 96 97 date = _parse_date(d.pop("date", UNSET)) 98 99 governance_expiry = cls( 100 type_=type_, 101 days=days, 102 date=date, 103 ) 104 105 governance_expiry.additional_properties = d 106 return governance_expiry
class GovernanceExpiryType(str, Enum):
    """Wire values for the expiry conditions applicable to governance requirements."""

    ABSOLUTE = "ABSOLUTE"
    NONE = "NONE"
    RELATIVE_COMPLETION = "RELATIVE_COMPLETION"
    RELATIVE_ENACTMENT = "RELATIVE_ENACTMENT"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Render as the plain wire value rather than "GovernanceExpiryType.X".
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Map unrecognized wire values to UNKNOWN instead of raising ValueError.
        # Returning the member directly avoids the redundant re-entrant lookup
        # that cls(cls.UNKNOWN) performed.
        return cls.UNKNOWN
str(object='') -> str
str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class GovernanceFile:
    """
    Attributes:
        name (str | Unset): The title of the resource visible to users
        description (str | Unset): A description of the resource visible to users
        src (str | Unset): The file name without path or the full link path
        type_ (GovernanceFileType | Unset): The options for supplementals for governance requirements
    """

    name: str | Unset = UNSET
    description: str | Unset = UNSET
    src: str | Unset = UNSET
    type_: GovernanceFileType | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a dict, omitting unset fields; the enum is sent as its value."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        if self.name is not UNSET:
            serialized["name"] = self.name
        if self.description is not UNSET:
            serialized["description"] = self.description
        if self.src is not UNSET:
            serialized["src"] = self.src
        if not isinstance(self.type_, Unset):
            serialized["type"] = self.type_.value
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a mapping; leftover keys become additional properties."""
        data = dict(src_dict)
        raw_type = data.pop("type", UNSET)
        parsed_type: GovernanceFileType | Unset
        parsed_type = UNSET if isinstance(raw_type, Unset) else GovernanceFileType(raw_type)
        instance = cls(
            name=data.pop("name", UNSET),
            description=data.pop("description", UNSET),
            src=data.pop("src", UNSET),
            type_=parsed_type,
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored additional properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str | Unset): The title of the resource visible to users
- description (str | Unset): A description of the resource visible to users
- src (str | Unset): The file name without path or the full link path
- type_ (GovernanceFileType | Unset): The options for supplementals for governance requirements
27def __init__(self, name=attr_dict['name'].default, description=attr_dict['description'].default, src=attr_dict['src'].default, type_=attr_dict['type_'].default): 28 self.name = name 29 self.description = description 30 self.src = src 31 self.type_ = type_ 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GovernanceFile.
32 def to_dict(self) -> dict[str, Any]: 33 name = self.name 34 35 description = self.description 36 37 src = self.src 38 39 type_: str | Unset = UNSET 40 if not isinstance(self.type_, Unset): 41 type_ = self.type_.value 42 43 field_dict: dict[str, Any] = {} 44 field_dict.update(self.additional_properties) 45 field_dict.update({}) 46 if name is not UNSET: 47 field_dict["name"] = name 48 if description is not UNSET: 49 field_dict["description"] = description 50 if src is not UNSET: 51 field_dict["src"] = src 52 if type_ is not UNSET: 53 field_dict["type"] = type_ 54 55 return field_dict
57 @classmethod 58 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 59 d = dict(src_dict) 60 name = d.pop("name", UNSET) 61 62 description = d.pop("description", UNSET) 63 64 src = d.pop("src", UNSET) 65 66 _type_ = d.pop("type", UNSET) 67 type_: GovernanceFileType | Unset 68 if isinstance(_type_, Unset): 69 type_ = UNSET 70 else: 71 type_ = GovernanceFileType(_type_) 72 73 governance_file = cls( 74 name=name, 75 description=description, 76 src=src, 77 type_=type_, 78 ) 79 80 governance_file.additional_properties = d 81 return governance_file
@_attrs_define
class GovernanceFileAccessRequest:
    """
    Attributes:
        access_type (GovernanceAccessType):
        fulfillment_id (None | str | Unset):
        project_id (None | str | Unset):
        token_lifetime_hours (int | None | Unset):
    """

    access_type: GovernanceAccessType
    fulfillment_id: None | str | Unset = UNSET
    project_id: None | str | Unset = UNSET
    token_lifetime_hours: int | None | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        # The required enum is always serialized as its wire value.
        access_type = self.access_type.value

        fulfillment_id: None | str | Unset
        if isinstance(self.fulfillment_id, Unset):
            fulfillment_id = UNSET
        else:
            fulfillment_id = self.fulfillment_id

        project_id: None | str | Unset
        if isinstance(self.project_id, Unset):
            project_id = UNSET
        else:
            project_id = self.project_id

        token_lifetime_hours: int | None | Unset
        if isinstance(self.token_lifetime_hours, Unset):
            token_lifetime_hours = UNSET
        else:
            token_lifetime_hours = self.token_lifetime_hours

        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "accessType": access_type,
            }
        )
        # Optional fields are omitted when UNSET but serialized when explicitly None.
        if fulfillment_id is not UNSET:
            field_dict["fulfillmentId"] = fulfillment_id
        if project_id is not UNSET:
            field_dict["projectId"] = project_id
        if token_lifetime_hours is not UNSET:
            field_dict["tokenLifetimeHours"] = token_lifetime_hours

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        d = dict(src_dict)
        # Required key: raises KeyError if "accessType" is missing.
        access_type = GovernanceAccessType(d.pop("accessType"))

        def _parse_fulfillment_id(data: object) -> None | str | Unset:
            # None and UNSET pass through unchanged; other values are treated as str.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        fulfillment_id = _parse_fulfillment_id(d.pop("fulfillmentId", UNSET))

        def _parse_project_id(data: object) -> None | str | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        project_id = _parse_project_id(d.pop("projectId", UNSET))

        def _parse_token_lifetime_hours(data: object) -> int | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(int | None | Unset, data)

        token_lifetime_hours = _parse_token_lifetime_hours(d.pop("tokenLifetimeHours", UNSET))

        governance_file_access_request = cls(
            access_type=access_type,
            fulfillment_id=fulfillment_id,
            project_id=project_id,
            token_lifetime_hours=token_lifetime_hours,
        )

        # Remaining keys are preserved as additional properties.
        governance_file_access_request.additional_properties = d
        return governance_file_access_request

    @property
    def additional_keys(self) -> list[str]:
        # Names of all stored additional properties.
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- access_type (GovernanceAccessType):
- fulfillment_id (None | str | Unset):
- project_id (None | str | Unset):
- token_lifetime_hours (int | None | Unset):
27def __init__(self, access_type, fulfillment_id=attr_dict['fulfillment_id'].default, project_id=attr_dict['project_id'].default, token_lifetime_hours=attr_dict['token_lifetime_hours'].default): 28 self.access_type = access_type 29 self.fulfillment_id = fulfillment_id 30 self.project_id = project_id 31 self.token_lifetime_hours = token_lifetime_hours 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GovernanceFileAccessRequest.
32 def to_dict(self) -> dict[str, Any]: 33 access_type = self.access_type.value 34 35 fulfillment_id: None | str | Unset 36 if isinstance(self.fulfillment_id, Unset): 37 fulfillment_id = UNSET 38 else: 39 fulfillment_id = self.fulfillment_id 40 41 project_id: None | str | Unset 42 if isinstance(self.project_id, Unset): 43 project_id = UNSET 44 else: 45 project_id = self.project_id 46 47 token_lifetime_hours: int | None | Unset 48 if isinstance(self.token_lifetime_hours, Unset): 49 token_lifetime_hours = UNSET 50 else: 51 token_lifetime_hours = self.token_lifetime_hours 52 53 field_dict: dict[str, Any] = {} 54 field_dict.update(self.additional_properties) 55 field_dict.update( 56 { 57 "accessType": access_type, 58 } 59 ) 60 if fulfillment_id is not UNSET: 61 field_dict["fulfillmentId"] = fulfillment_id 62 if project_id is not UNSET: 63 field_dict["projectId"] = project_id 64 if token_lifetime_hours is not UNSET: 65 field_dict["tokenLifetimeHours"] = token_lifetime_hours 66 67 return field_dict
69 @classmethod 70 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 71 d = dict(src_dict) 72 access_type = GovernanceAccessType(d.pop("accessType")) 73 74 def _parse_fulfillment_id(data: object) -> None | str | Unset: 75 if data is None: 76 return data 77 if isinstance(data, Unset): 78 return data 79 return cast(None | str | Unset, data) 80 81 fulfillment_id = _parse_fulfillment_id(d.pop("fulfillmentId", UNSET)) 82 83 def _parse_project_id(data: object) -> None | str | Unset: 84 if data is None: 85 return data 86 if isinstance(data, Unset): 87 return data 88 return cast(None | str | Unset, data) 89 90 project_id = _parse_project_id(d.pop("projectId", UNSET)) 91 92 def _parse_token_lifetime_hours(data: object) -> int | None | Unset: 93 if data is None: 94 return data 95 if isinstance(data, Unset): 96 return data 97 return cast(int | None | Unset, data) 98 99 token_lifetime_hours = _parse_token_lifetime_hours(d.pop("tokenLifetimeHours", UNSET)) 100 101 governance_file_access_request = cls( 102 access_type=access_type, 103 fulfillment_id=fulfillment_id, 104 project_id=project_id, 105 token_lifetime_hours=token_lifetime_hours, 106 ) 107 108 governance_file_access_request.additional_properties = d 109 return governance_file_access_request
15@_attrs_define 16class GovernanceFileInput: 17 """ 18 Attributes: 19 name (str): 20 description (str): 21 src (str): 22 type_ (GovernanceFileType): The options for supplementals for governance requirements 23 """ 24 25 name: str 26 description: str 27 src: str 28 type_: GovernanceFileType 29 additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) 30 31 def to_dict(self) -> dict[str, Any]: 32 name = self.name 33 34 description = self.description 35 36 src = self.src 37 38 type_ = self.type_.value 39 40 field_dict: dict[str, Any] = {} 41 field_dict.update(self.additional_properties) 42 field_dict.update( 43 { 44 "name": name, 45 "description": description, 46 "src": src, 47 "type": type_, 48 } 49 ) 50 51 return field_dict 52 53 @classmethod 54 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 55 d = dict(src_dict) 56 name = d.pop("name") 57 58 description = d.pop("description") 59 60 src = d.pop("src") 61 62 type_ = GovernanceFileType(d.pop("type")) 63 64 governance_file_input = cls( 65 name=name, 66 description=description, 67 src=src, 68 type_=type_, 69 ) 70 71 governance_file_input.additional_properties = d 72 return governance_file_input 73 74 @property 75 def additional_keys(self) -> list[str]: 76 return list(self.additional_properties.keys()) 77 78 def __getitem__(self, key: str) -> Any: 79 return self.additional_properties[key] 80 81 def __setitem__(self, key: str, value: Any) -> None: 82 self.additional_properties[key] = value 83 84 def __delitem__(self, key: str) -> None: 85 del self.additional_properties[key] 86 87 def __contains__(self, key: str) -> bool: 88 return key in self.additional_properties
Attributes:
- name (str):
- description (str):
- src (str):
- type_ (GovernanceFileType): The options for supplementals for governance requirements
27def __init__(self, name, description, src, type_): 28 self.name = name 29 self.description = description 30 self.src = src 31 self.type_ = type_ 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GovernanceFileInput.
31 def to_dict(self) -> dict[str, Any]: 32 name = self.name 33 34 description = self.description 35 36 src = self.src 37 38 type_ = self.type_.value 39 40 field_dict: dict[str, Any] = {} 41 field_dict.update(self.additional_properties) 42 field_dict.update( 43 { 44 "name": name, 45 "description": description, 46 "src": src, 47 "type": type_, 48 } 49 ) 50 51 return field_dict
53 @classmethod 54 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 55 d = dict(src_dict) 56 name = d.pop("name") 57 58 description = d.pop("description") 59 60 src = d.pop("src") 61 62 type_ = GovernanceFileType(d.pop("type")) 63 64 governance_file_input = cls( 65 name=name, 66 description=description, 67 src=src, 68 type_=type_, 69 ) 70 71 governance_file_input.additional_properties = d 72 return governance_file_input
5class GovernanceFileType(str, Enum): 6 FILE = "FILE" 7 LINK = "LINK" 8 UNKNOWN = "UNKNOWN" 9 """ This is a fallback value for when the value is not known, do not use this value when making requests """ 10 11 def __str__(self) -> str: 12 return str(self.value) 13 14 @classmethod 15 def _missing_(cls, number): 16 return cls(cls.UNKNOWN)
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class GovernanceRequirement:
    """A governance requirement enforced at project or tenant level.

    Attributes:
        id (str): The unique identifier for the requirement
        name (str): The name of the requirement
        description (str): A brief description of the requirement
        type_ (GovernanceType): The types of governance requirements that can be enforced
        path (str): S3 prefix where files for the requirement are saved
        scope (GovernanceScope): The levels at which governance requirements can be enforced
        contact_ids (list[str]): The IDs of governance contacts assigned to the requirement.
        expiration (GovernanceExpiry):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
        project_id (str | Unset): The project ID if the requirement is project scope
        acceptance (GovernanceScope | None | Unset): Specifies the level at which it is satisfied
        enactment_date (datetime.datetime | None | Unset): The date of enactment for a requirement
        supplemental_docs (list[GovernanceFile] | None | Unset): Optional files with extra information, e.g. templates
            for documents, links, etc
        file (GovernanceFile | None | Unset):
        authorship (GovernanceScope | None | Unset): Who needs to supply the agreement document
        project_file_map (GovernanceRequirementProjectFileMap | None | Unset): Files supplied by each project when
            authorship is project
        verification_method (GovernanceTrainingVerification | None | Unset): The value indicating how the completion of
            the training is verified.
    """

    id: str
    name: str
    description: str
    type_: GovernanceType
    path: str
    scope: GovernanceScope
    contact_ids: list[str]
    expiration: GovernanceExpiry
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    project_id: str | Unset = UNSET
    acceptance: GovernanceScope | None | Unset = UNSET
    enactment_date: datetime.datetime | None | Unset = UNSET
    supplemental_docs: list[GovernanceFile] | None | Unset = UNSET
    file: GovernanceFile | None | Unset = UNSET
    authorship: GovernanceScope | None | Unset = UNSET
    project_file_map: GovernanceRequirementProjectFileMap | None | Unset = UNSET
    verification_method: GovernanceTrainingVerification | None | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; optional fields left UNSET are omitted.

        Optional union fields follow a three-way pattern: Unset -> omitted,
        typed value -> converted to its wire form, anything else (e.g. None)
        -> passed through unchanged.
        """
        # Function-level imports — presumably to avoid an import cycle at
        # module load time; TODO confirm against the generator's conventions.
        from ..models.governance_file import GovernanceFile
        from ..models.governance_requirement_project_file_map import GovernanceRequirementProjectFileMap

        id = self.id

        name = self.name

        description = self.description

        type_ = self.type_.value

        path = self.path

        scope = self.scope.value

        contact_ids = self.contact_ids

        expiration = self.expiration.to_dict()

        created_by = self.created_by

        created_at = self.created_at.isoformat()

        updated_at = self.updated_at.isoformat()

        project_id = self.project_id

        acceptance: None | str | Unset
        if isinstance(self.acceptance, Unset):
            acceptance = UNSET
        elif isinstance(self.acceptance, GovernanceScope):
            acceptance = self.acceptance.value
        else:
            acceptance = self.acceptance

        enactment_date: None | str | Unset
        if isinstance(self.enactment_date, Unset):
            enactment_date = UNSET
        elif isinstance(self.enactment_date, datetime.datetime):
            enactment_date = self.enactment_date.isoformat()
        else:
            enactment_date = self.enactment_date

        supplemental_docs: list[dict[str, Any]] | None | Unset
        if isinstance(self.supplemental_docs, Unset):
            supplemental_docs = UNSET
        elif isinstance(self.supplemental_docs, list):
            # Serialize each GovernanceFile element individually.
            supplemental_docs = []
            for supplemental_docs_type_0_item_data in self.supplemental_docs:
                supplemental_docs_type_0_item = supplemental_docs_type_0_item_data.to_dict()
                supplemental_docs.append(supplemental_docs_type_0_item)

        else:
            supplemental_docs = self.supplemental_docs

        file: dict[str, Any] | None | Unset
        if isinstance(self.file, Unset):
            file = UNSET
        elif isinstance(self.file, GovernanceFile):
            file = self.file.to_dict()
        else:
            file = self.file

        authorship: None | str | Unset
        if isinstance(self.authorship, Unset):
            authorship = UNSET
        elif isinstance(self.authorship, GovernanceScope):
            authorship = self.authorship.value
        else:
            authorship = self.authorship

        project_file_map: dict[str, Any] | None | Unset
        if isinstance(self.project_file_map, Unset):
            project_file_map = UNSET
        elif isinstance(self.project_file_map, GovernanceRequirementProjectFileMap):
            project_file_map = self.project_file_map.to_dict()
        else:
            project_file_map = self.project_file_map

        verification_method: None | str | Unset
        if isinstance(self.verification_method, Unset):
            verification_method = UNSET
        elif isinstance(self.verification_method, GovernanceTrainingVerification):
            verification_method = self.verification_method.value
        else:
            verification_method = self.verification_method

        # Unknown extra properties go in first so the declared fields win
        # on key clashes.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
                "description": description,
                "type": type_,
                "path": path,
                "scope": scope,
                "contactIds": contact_ids,
                "expiration": expiration,
                "createdBy": created_by,
                "createdAt": created_at,
                "updatedAt": updated_at,
            }
        )
        # Optional fields are only emitted when they carry a value (or an
        # explicit None).
        if project_id is not UNSET:
            field_dict["projectId"] = project_id
        if acceptance is not UNSET:
            field_dict["acceptance"] = acceptance
        if enactment_date is not UNSET:
            field_dict["enactmentDate"] = enactment_date
        if supplemental_docs is not UNSET:
            field_dict["supplementalDocs"] = supplemental_docs
        if file is not UNSET:
            field_dict["file"] = file
        if authorship is not UNSET:
            field_dict["authorship"] = authorship
        if project_file_map is not UNSET:
            field_dict["projectFileMap"] = project_file_map
        if verification_method is not UNSET:
            field_dict["verificationMethod"] = verification_method

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a decoded JSON dict.

        Each optional union field has a nested ``_parse_*`` helper that
        tries the typed interpretation first and, if that raises, falls
        back to returning the raw wire value unchanged. Keys not claimed
        by a declared field are kept in ``additional_properties``.
        """
        from ..models.governance_expiry import GovernanceExpiry
        from ..models.governance_file import GovernanceFile
        from ..models.governance_requirement_project_file_map import GovernanceRequirementProjectFileMap

        d = dict(src_dict)
        id = d.pop("id")

        name = d.pop("name")

        description = d.pop("description")

        type_ = GovernanceType(d.pop("type"))

        path = d.pop("path")

        scope = GovernanceScope(d.pop("scope"))

        contact_ids = cast(list[str], d.pop("contactIds"))

        expiration = GovernanceExpiry.from_dict(d.pop("expiration"))

        created_by = d.pop("createdBy")

        created_at = isoparse(d.pop("createdAt"))

        updated_at = isoparse(d.pop("updatedAt"))

        project_id = d.pop("projectId", UNSET)

        def _parse_acceptance(data: object) -> GovernanceScope | None | Unset:
            # None/Unset pass through; a string is tried as a GovernanceScope.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                acceptance_type_1 = GovernanceScope(data)

                return acceptance_type_1
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(GovernanceScope | None | Unset, data)

        acceptance = _parse_acceptance(d.pop("acceptance", UNSET))

        def _parse_enactment_date(data: object) -> datetime.datetime | None | Unset:
            # None/Unset pass through; a string is tried as an ISO-8601 datetime.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                enactment_date_type_0 = isoparse(data)

                return enactment_date_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(datetime.datetime | None | Unset, data)

        enactment_date = _parse_enactment_date(d.pop("enactmentDate", UNSET))

        def _parse_supplemental_docs(data: object) -> list[GovernanceFile] | None | Unset:
            # None/Unset pass through; a list is deserialized element-wise.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                supplemental_docs_type_0 = []
                _supplemental_docs_type_0 = data
                for supplemental_docs_type_0_item_data in _supplemental_docs_type_0:
                    supplemental_docs_type_0_item = GovernanceFile.from_dict(supplemental_docs_type_0_item_data)

                    supplemental_docs_type_0.append(supplemental_docs_type_0_item)

                return supplemental_docs_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(list[GovernanceFile] | None | Unset, data)

        supplemental_docs = _parse_supplemental_docs(d.pop("supplementalDocs", UNSET))

        def _parse_file(data: object) -> GovernanceFile | None | Unset:
            # None/Unset pass through; a dict is tried as a GovernanceFile.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                file_type_1 = GovernanceFile.from_dict(data)

                return file_type_1
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(GovernanceFile | None | Unset, data)

        file = _parse_file(d.pop("file", UNSET))

        def _parse_authorship(data: object) -> GovernanceScope | None | Unset:
            # None/Unset pass through; a string is tried as a GovernanceScope.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                authorship_type_1 = GovernanceScope(data)

                return authorship_type_1
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(GovernanceScope | None | Unset, data)

        authorship = _parse_authorship(d.pop("authorship", UNSET))

        def _parse_project_file_map(data: object) -> GovernanceRequirementProjectFileMap | None | Unset:
            # None/Unset pass through; a dict is tried as the project-file map.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                project_file_map_type_0 = GovernanceRequirementProjectFileMap.from_dict(data)

                return project_file_map_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(GovernanceRequirementProjectFileMap | None | Unset, data)

        project_file_map = _parse_project_file_map(d.pop("projectFileMap", UNSET))

        def _parse_verification_method(data: object) -> GovernanceTrainingVerification | None | Unset:
            # None/Unset pass through; a string is tried as a verification enum.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                verification_method_type_1 = GovernanceTrainingVerification(data)

                return verification_method_type_1
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(GovernanceTrainingVerification | None | Unset, data)

        verification_method = _parse_verification_method(d.pop("verificationMethod", UNSET))

        governance_requirement = cls(
            id=id,
            name=name,
            description=description,
            type_=type_,
            path=path,
            scope=scope,
            contact_ids=contact_ids,
            expiration=expiration,
            created_by=created_by,
            created_at=created_at,
            updated_at=updated_at,
            project_id=project_id,
            acceptance=acceptance,
            enactment_date=enactment_date,
            supplemental_docs=supplemental_docs,
            file=file,
            authorship=authorship,
            project_file_map=project_file_map,
            verification_method=verification_method,
        )

        # Whatever keys remain were not declared fields; keep them verbatim.
        governance_requirement.additional_properties = d
        return governance_requirement

    @property
    def additional_keys(self) -> list[str]:
        # Names of the undeclared properties carried by this model.
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str): The unique identifier for the requirement
- name (str): The name of the requirement
- description (str): A brief description of the requirement
- type_ (GovernanceType): The types of governance requirements that can be enforced
- path (str): S3 prefix where files for the requirement are saved
- scope (GovernanceScope): The levels at which governance requirements can be enforced
- contact_ids (list[str]): The IDs of governance contacts assigned to the requirement.
- expiration (GovernanceExpiry):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
- project_id (str | Unset): The project ID if the requirement is project scope
- acceptance (GovernanceScope | None | Unset): Specifies the level at which it is satisfied
- enactment_date (datetime.datetime | None | Unset): The date of enactment for a requirement
- supplemental_docs (list[GovernanceFile] | None | Unset): Optional files with extra information, e.g. templates for documents, links, etc.
- file (GovernanceFile | None | Unset):
- authorship (GovernanceScope | None | Unset): Who needs to supply the agreement document
- project_file_map (GovernanceRequirementProjectFileMap | None | Unset): Files supplied by each project when authorship is project
- verification_method (GovernanceTrainingVerification | None | Unset): The value indicating how the completion of the training is verified.
42def __init__(self, id, name, description, type_, path, scope, contact_ids, expiration, created_by, created_at, updated_at, project_id=attr_dict['project_id'].default, acceptance=attr_dict['acceptance'].default, enactment_date=attr_dict['enactment_date'].default, supplemental_docs=attr_dict['supplemental_docs'].default, file=attr_dict['file'].default, authorship=attr_dict['authorship'].default, project_file_map=attr_dict['project_file_map'].default, verification_method=attr_dict['verification_method'].default): 43 self.id = id 44 self.name = name 45 self.description = description 46 self.type_ = type_ 47 self.path = path 48 self.scope = scope 49 self.contact_ids = contact_ids 50 self.expiration = expiration 51 self.created_by = created_by 52 self.created_at = created_at 53 self.updated_at = updated_at 54 self.project_id = project_id 55 self.acceptance = acceptance 56 self.enactment_date = enactment_date 57 self.supplemental_docs = supplemental_docs 58 self.file = file 59 self.authorship = authorship 60 self.project_file_map = project_file_map 61 self.verification_method = verification_method 62 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GovernanceRequirement.
75 def to_dict(self) -> dict[str, Any]: 76 from ..models.governance_file import GovernanceFile 77 from ..models.governance_requirement_project_file_map import GovernanceRequirementProjectFileMap 78 79 id = self.id 80 81 name = self.name 82 83 description = self.description 84 85 type_ = self.type_.value 86 87 path = self.path 88 89 scope = self.scope.value 90 91 contact_ids = self.contact_ids 92 93 expiration = self.expiration.to_dict() 94 95 created_by = self.created_by 96 97 created_at = self.created_at.isoformat() 98 99 updated_at = self.updated_at.isoformat() 100 101 project_id = self.project_id 102 103 acceptance: None | str | Unset 104 if isinstance(self.acceptance, Unset): 105 acceptance = UNSET 106 elif isinstance(self.acceptance, GovernanceScope): 107 acceptance = self.acceptance.value 108 else: 109 acceptance = self.acceptance 110 111 enactment_date: None | str | Unset 112 if isinstance(self.enactment_date, Unset): 113 enactment_date = UNSET 114 elif isinstance(self.enactment_date, datetime.datetime): 115 enactment_date = self.enactment_date.isoformat() 116 else: 117 enactment_date = self.enactment_date 118 119 supplemental_docs: list[dict[str, Any]] | None | Unset 120 if isinstance(self.supplemental_docs, Unset): 121 supplemental_docs = UNSET 122 elif isinstance(self.supplemental_docs, list): 123 supplemental_docs = [] 124 for supplemental_docs_type_0_item_data in self.supplemental_docs: 125 supplemental_docs_type_0_item = supplemental_docs_type_0_item_data.to_dict() 126 supplemental_docs.append(supplemental_docs_type_0_item) 127 128 else: 129 supplemental_docs = self.supplemental_docs 130 131 file: dict[str, Any] | None | Unset 132 if isinstance(self.file, Unset): 133 file = UNSET 134 elif isinstance(self.file, GovernanceFile): 135 file = self.file.to_dict() 136 else: 137 file = self.file 138 139 authorship: None | str | Unset 140 if isinstance(self.authorship, Unset): 141 authorship = UNSET 142 elif isinstance(self.authorship, GovernanceScope): 143 
authorship = self.authorship.value 144 else: 145 authorship = self.authorship 146 147 project_file_map: dict[str, Any] | None | Unset 148 if isinstance(self.project_file_map, Unset): 149 project_file_map = UNSET 150 elif isinstance(self.project_file_map, GovernanceRequirementProjectFileMap): 151 project_file_map = self.project_file_map.to_dict() 152 else: 153 project_file_map = self.project_file_map 154 155 verification_method: None | str | Unset 156 if isinstance(self.verification_method, Unset): 157 verification_method = UNSET 158 elif isinstance(self.verification_method, GovernanceTrainingVerification): 159 verification_method = self.verification_method.value 160 else: 161 verification_method = self.verification_method 162 163 field_dict: dict[str, Any] = {} 164 field_dict.update(self.additional_properties) 165 field_dict.update( 166 { 167 "id": id, 168 "name": name, 169 "description": description, 170 "type": type_, 171 "path": path, 172 "scope": scope, 173 "contactIds": contact_ids, 174 "expiration": expiration, 175 "createdBy": created_by, 176 "createdAt": created_at, 177 "updatedAt": updated_at, 178 } 179 ) 180 if project_id is not UNSET: 181 field_dict["projectId"] = project_id 182 if acceptance is not UNSET: 183 field_dict["acceptance"] = acceptance 184 if enactment_date is not UNSET: 185 field_dict["enactmentDate"] = enactment_date 186 if supplemental_docs is not UNSET: 187 field_dict["supplementalDocs"] = supplemental_docs 188 if file is not UNSET: 189 field_dict["file"] = file 190 if authorship is not UNSET: 191 field_dict["authorship"] = authorship 192 if project_file_map is not UNSET: 193 field_dict["projectFileMap"] = project_file_map 194 if verification_method is not UNSET: 195 field_dict["verificationMethod"] = verification_method 196 197 return field_dict
199 @classmethod 200 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 201 from ..models.governance_expiry import GovernanceExpiry 202 from ..models.governance_file import GovernanceFile 203 from ..models.governance_requirement_project_file_map import GovernanceRequirementProjectFileMap 204 205 d = dict(src_dict) 206 id = d.pop("id") 207 208 name = d.pop("name") 209 210 description = d.pop("description") 211 212 type_ = GovernanceType(d.pop("type")) 213 214 path = d.pop("path") 215 216 scope = GovernanceScope(d.pop("scope")) 217 218 contact_ids = cast(list[str], d.pop("contactIds")) 219 220 expiration = GovernanceExpiry.from_dict(d.pop("expiration")) 221 222 created_by = d.pop("createdBy") 223 224 created_at = isoparse(d.pop("createdAt")) 225 226 updated_at = isoparse(d.pop("updatedAt")) 227 228 project_id = d.pop("projectId", UNSET) 229 230 def _parse_acceptance(data: object) -> GovernanceScope | None | Unset: 231 if data is None: 232 return data 233 if isinstance(data, Unset): 234 return data 235 try: 236 if not isinstance(data, str): 237 raise TypeError() 238 acceptance_type_1 = GovernanceScope(data) 239 240 return acceptance_type_1 241 except (TypeError, ValueError, AttributeError, KeyError): 242 pass 243 return cast(GovernanceScope | None | Unset, data) 244 245 acceptance = _parse_acceptance(d.pop("acceptance", UNSET)) 246 247 def _parse_enactment_date(data: object) -> datetime.datetime | None | Unset: 248 if data is None: 249 return data 250 if isinstance(data, Unset): 251 return data 252 try: 253 if not isinstance(data, str): 254 raise TypeError() 255 enactment_date_type_0 = isoparse(data) 256 257 return enactment_date_type_0 258 except (TypeError, ValueError, AttributeError, KeyError): 259 pass 260 return cast(datetime.datetime | None | Unset, data) 261 262 enactment_date = _parse_enactment_date(d.pop("enactmentDate", UNSET)) 263 264 def _parse_supplemental_docs(data: object) -> list[GovernanceFile] | None | Unset: 265 if data is None: 266 
return data 267 if isinstance(data, Unset): 268 return data 269 try: 270 if not isinstance(data, list): 271 raise TypeError() 272 supplemental_docs_type_0 = [] 273 _supplemental_docs_type_0 = data 274 for supplemental_docs_type_0_item_data in _supplemental_docs_type_0: 275 supplemental_docs_type_0_item = GovernanceFile.from_dict(supplemental_docs_type_0_item_data) 276 277 supplemental_docs_type_0.append(supplemental_docs_type_0_item) 278 279 return supplemental_docs_type_0 280 except (TypeError, ValueError, AttributeError, KeyError): 281 pass 282 return cast(list[GovernanceFile] | None | Unset, data) 283 284 supplemental_docs = _parse_supplemental_docs(d.pop("supplementalDocs", UNSET)) 285 286 def _parse_file(data: object) -> GovernanceFile | None | Unset: 287 if data is None: 288 return data 289 if isinstance(data, Unset): 290 return data 291 try: 292 if not isinstance(data, dict): 293 raise TypeError() 294 file_type_1 = GovernanceFile.from_dict(data) 295 296 return file_type_1 297 except (TypeError, ValueError, AttributeError, KeyError): 298 pass 299 return cast(GovernanceFile | None | Unset, data) 300 301 file = _parse_file(d.pop("file", UNSET)) 302 303 def _parse_authorship(data: object) -> GovernanceScope | None | Unset: 304 if data is None: 305 return data 306 if isinstance(data, Unset): 307 return data 308 try: 309 if not isinstance(data, str): 310 raise TypeError() 311 authorship_type_1 = GovernanceScope(data) 312 313 return authorship_type_1 314 except (TypeError, ValueError, AttributeError, KeyError): 315 pass 316 return cast(GovernanceScope | None | Unset, data) 317 318 authorship = _parse_authorship(d.pop("authorship", UNSET)) 319 320 def _parse_project_file_map(data: object) -> GovernanceRequirementProjectFileMap | None | Unset: 321 if data is None: 322 return data 323 if isinstance(data, Unset): 324 return data 325 try: 326 if not isinstance(data, dict): 327 raise TypeError() 328 project_file_map_type_0 = 
GovernanceRequirementProjectFileMap.from_dict(data) 329 330 return project_file_map_type_0 331 except (TypeError, ValueError, AttributeError, KeyError): 332 pass 333 return cast(GovernanceRequirementProjectFileMap | None | Unset, data) 334 335 project_file_map = _parse_project_file_map(d.pop("projectFileMap", UNSET)) 336 337 def _parse_verification_method(data: object) -> GovernanceTrainingVerification | None | Unset: 338 if data is None: 339 return data 340 if isinstance(data, Unset): 341 return data 342 try: 343 if not isinstance(data, str): 344 raise TypeError() 345 verification_method_type_1 = GovernanceTrainingVerification(data) 346 347 return verification_method_type_1 348 except (TypeError, ValueError, AttributeError, KeyError): 349 pass 350 return cast(GovernanceTrainingVerification | None | Unset, data) 351 352 verification_method = _parse_verification_method(d.pop("verificationMethod", UNSET)) 353 354 governance_requirement = cls( 355 id=id, 356 name=name, 357 description=description, 358 type_=type_, 359 path=path, 360 scope=scope, 361 contact_ids=contact_ids, 362 expiration=expiration, 363 created_by=created_by, 364 created_at=created_at, 365 updated_at=updated_at, 366 project_id=project_id, 367 acceptance=acceptance, 368 enactment_date=enactment_date, 369 supplemental_docs=supplemental_docs, 370 file=file, 371 authorship=authorship, 372 project_file_map=project_file_map, 373 verification_method=verification_method, 374 ) 375 376 governance_requirement.additional_properties = d 377 return governance_requirement
17@_attrs_define 18class GovernanceRequirementProjectFileMap: 19 """Files supplied by each project when authorship is project""" 20 21 additional_properties: dict[str, GovernanceFile] = _attrs_field(init=False, factory=dict) 22 23 def to_dict(self) -> dict[str, Any]: 24 field_dict: dict[str, Any] = {} 25 for prop_name, prop in self.additional_properties.items(): 26 field_dict[prop_name] = prop.to_dict() 27 28 return field_dict 29 30 @classmethod 31 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 32 from ..models.governance_file import GovernanceFile 33 34 d = dict(src_dict) 35 governance_requirement_project_file_map = cls() 36 37 additional_properties = {} 38 for prop_name, prop_dict in d.items(): 39 additional_property = GovernanceFile.from_dict(prop_dict) 40 41 additional_properties[prop_name] = additional_property 42 43 governance_requirement_project_file_map.additional_properties = additional_properties 44 return governance_requirement_project_file_map 45 46 @property 47 def additional_keys(self) -> list[str]: 48 return list(self.additional_properties.keys()) 49 50 def __getitem__(self, key: str) -> GovernanceFile: 51 return self.additional_properties[key] 52 53 def __setitem__(self, key: str, value: GovernanceFile) -> None: 54 self.additional_properties[key] = value 55 56 def __delitem__(self, key: str) -> None: 57 del self.additional_properties[key] 58 59 def __contains__(self, key: str) -> bool: 60 return key in self.additional_properties
Files supplied by each project when authorship is project
Method generated by attrs for class GovernanceRequirementProjectFileMap.
30 @classmethod 31 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 32 from ..models.governance_file import GovernanceFile 33 34 d = dict(src_dict) 35 governance_requirement_project_file_map = cls() 36 37 additional_properties = {} 38 for prop_name, prop_dict in d.items(): 39 additional_property = GovernanceFile.from_dict(prop_dict) 40 41 additional_properties[prop_name] = additional_property 42 43 governance_requirement_project_file_map.additional_properties = additional_properties 44 return governance_requirement_project_file_map
class GovernanceScope(str, Enum):
    """Scope at which a governance item applies (project- or tenant-level)."""

    PROJECT = "PROJECT"
    TENANT = "TENANT"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Render as the raw wire value, not the default "GovernanceScope.PROJECT".
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Any unrecognized wire value maps to UNKNOWN instead of raising ValueError.
        # (Return the member directly; the original cls(cls.UNKNOWN) round-trip was redundant.)
        return cls.UNKNOWN
Returns the enum member's raw string value (e.g. "PROJECT") rather than the default "GovernanceScope.PROJECT" representation, so the member renders as its wire value.
This is a fallback value for when the value is not known, do not use this value when making requests
class GovernanceTrainingVerification(str, Enum):
    """How completion of a governance training is verified."""

    CERTIFICATE = "CERTIFICATE"
    SELF = "SELF"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Render as the raw wire value, not the default "GovernanceTrainingVerification.SELF".
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Any unrecognized wire value maps to UNKNOWN instead of raising ValueError.
        # (Return the member directly; the original cls(cls.UNKNOWN) round-trip was redundant.)
        return cls.UNKNOWN
Returns the enum member's raw string value (e.g. "CERTIFICATE") rather than the default "GovernanceTrainingVerification.CERTIFICATE" representation, so the member renders as its wire value.
This is a fallback value for when the value is not known, do not use this value when making requests
class GovernanceType(str, Enum):
    """Kind of governance requirement."""

    AGREEMENT = "AGREEMENT"
    DOCUMENT = "DOCUMENT"
    TRAINING = "TRAINING"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Render as the raw wire value, not the default "GovernanceType.DOCUMENT".
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Any unrecognized wire value maps to UNKNOWN instead of raising ValueError.
        # (Return the member directly; the original cls(cls.UNKNOWN) round-trip was redundant.)
        return cls.UNKNOWN
Returns the enum member's raw string value (e.g. "DOCUMENT") rather than the default "GovernanceType.DOCUMENT" representation, so the member renders as its wire value.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class GroupCost:
    """Cost associated with one task status group.

    Attributes:
        name (str | Unset): Task status group Example: CACHED.
        cost (float | Unset): Cost
    """

    name: str | Unset = UNSET
    cost: float | Unset = UNSET
    # Holds any JSON keys not declared above so round-tripping is lossless.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; fields left UNSET are omitted."""
        # Extra properties first so the declared fields win on key collisions.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        # (Removed the original's no-op field_dict.update({}) and redundant local aliases.)
        if self.name is not UNSET:
            field_dict["name"] = self.name
        if self.cost is not UNSET:
            field_dict["cost"] = self.cost

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a GroupCost from a decoded JSON mapping; unrecognized keys go to additional_properties."""
        d = dict(src_dict)
        group_cost = cls(
            name=d.pop("name", UNSET),
            cost=d.pop("cost", UNSET),
        )

        group_cost.additional_properties = d
        return group_cost

    @property
    def additional_keys(self) -> list[str]:
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str | Unset): Task status group Example: CACHED.
- cost (float | Unset): Cost
25def __init__(self, name=attr_dict['name'].default, cost=attr_dict['cost'].default): 26 self.name = name 27 self.cost = cost 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GroupCost.
27 def to_dict(self) -> dict[str, Any]: 28 name = self.name 29 30 cost = self.cost 31 32 field_dict: dict[str, Any] = {} 33 field_dict.update(self.additional_properties) 34 field_dict.update({}) 35 if name is not UNSET: 36 field_dict["name"] = name 37 if cost is not UNSET: 38 field_dict["cost"] = cost 39 40 return field_dict
@_attrs_define
class ImportDataRequest:
    """
    Attributes:
        name (str): Name of the dataset
        public_ids (list[str]):
        description (str | Unset): Description of the dataset
        tags (list[Tag] | None | Unset): List of tags to apply to the dataset
        download_method (ImportDataRequestDownloadMethod | Unset): Method to download FastQ files Default:
            ImportDataRequestDownloadMethod.SRATOOLS.
        dbgap_key (None | str | Unset): dbGaP repository key (used to access protected data on SRA)
    """

    name: str
    public_ids: list[str]
    description: str | Unset = UNSET
    tags: list[Tag] | None | Unset = UNSET
    download_method: ImportDataRequestDownloadMethod | Unset = ImportDataRequestDownloadMethod.SRATOOLS
    dbgap_key: None | str | Unset = UNSET
    # Holds any JSON keys not declared above so round-tripping is lossless.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        # Serialize to a JSON-ready dict. Required fields always appear
        # (under their camelCase wire names); UNSET optionals are omitted.
        name = self.name

        public_ids = self.public_ids

        description = self.description

        # tags is a union: Unset passes through, a list is serialized
        # element-by-element, and None falls through unchanged.
        tags: list[dict[str, Any]] | None | Unset
        if isinstance(self.tags, Unset):
            tags = UNSET
        elif isinstance(self.tags, list):
            tags = []
            for tags_type_0_item_data in self.tags:
                tags_type_0_item = tags_type_0_item_data.to_dict()
                tags.append(tags_type_0_item)

        else:
            tags = self.tags

        # Enum is serialized as its raw string value when set.
        download_method: str | Unset = UNSET
        if not isinstance(self.download_method, Unset):
            download_method = self.download_method.value

        # dbgap_key: Unset is omitted below; None is serialized as null.
        dbgap_key: None | str | Unset
        if isinstance(self.dbgap_key, Unset):
            dbgap_key = UNSET
        else:
            dbgap_key = self.dbgap_key

        # Extra properties first so the declared fields win on key collisions.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "name": name,
                "publicIds": public_ids,
            }
        )
        if description is not UNSET:
            field_dict["description"] = description
        if tags is not UNSET:
            field_dict["tags"] = tags
        if download_method is not UNSET:
            field_dict["downloadMethod"] = download_method
        if dbgap_key is not UNSET:
            field_dict["dbgapKey"] = dbgap_key

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        # Build an ImportDataRequest from a decoded JSON mapping.
        # Keys are popped as they are consumed; whatever remains is kept
        # in additional_properties.
        from ..models.tag import Tag

        d = dict(src_dict)
        name = d.pop("name")

        public_ids = cast(list[str], d.pop("publicIds"))

        description = d.pop("description", UNSET)

        def _parse_tags(data: object) -> list[Tag] | None | Unset:
            # Resolve the list[Tag] | None | Unset union: try the list-of-Tag
            # branch first; on any parse failure fall through and return the
            # raw data unchanged.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                tags_type_0 = []
                _tags_type_0 = data
                for tags_type_0_item_data in _tags_type_0:
                    tags_type_0_item = Tag.from_dict(tags_type_0_item_data)

                    tags_type_0.append(tags_type_0_item)

                return tags_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(list[Tag] | None | Unset, data)

        tags = _parse_tags(d.pop("tags", UNSET))

        # Missing key -> UNSET (so the class default applies); otherwise the
        # enum constructor maps unknown values to its UNKNOWN fallback.
        _download_method = d.pop("downloadMethod", UNSET)
        download_method: ImportDataRequestDownloadMethod | Unset
        if isinstance(_download_method, Unset):
            download_method = UNSET
        else:
            download_method = ImportDataRequestDownloadMethod(_download_method)

        def _parse_dbgap_key(data: object) -> None | str | Unset:
            # None and Unset pass through; anything else is treated as str.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        dbgap_key = _parse_dbgap_key(d.pop("dbgapKey", UNSET))

        import_data_request = cls(
            name=name,
            public_ids=public_ids,
            description=description,
            tags=tags,
            download_method=download_method,
            dbgap_key=dbgap_key,
        )

        # Everything not popped above is an undeclared extra property.
        import_data_request.additional_properties = d
        return import_data_request

    @property
    def additional_keys(self) -> list[str]:
        # Names of the undeclared extra properties.
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str): Name of the dataset
- public_ids (list[str]):
- description (str | Unset): Description of the dataset
- tags (list[Tag] | None | Unset): List of tags to apply to the dataset
- download_method (ImportDataRequestDownloadMethod | Unset): Method to download FastQ files Default: ImportDataRequestDownloadMethod.SRATOOLS.
- dbgap_key (None | str | Unset): dbGaP repository key (used to access protected data on SRA)
29def __init__(self, name, public_ids, description=attr_dict['description'].default, tags=attr_dict['tags'].default, download_method=attr_dict['download_method'].default, dbgap_key=attr_dict['dbgap_key'].default): 30 self.name = name 31 self.public_ids = public_ids 32 self.description = description 33 self.tags = tags 34 self.download_method = download_method 35 self.dbgap_key = dbgap_key 36 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ImportDataRequest.
41 def to_dict(self) -> dict[str, Any]: 42 name = self.name 43 44 public_ids = self.public_ids 45 46 description = self.description 47 48 tags: list[dict[str, Any]] | None | Unset 49 if isinstance(self.tags, Unset): 50 tags = UNSET 51 elif isinstance(self.tags, list): 52 tags = [] 53 for tags_type_0_item_data in self.tags: 54 tags_type_0_item = tags_type_0_item_data.to_dict() 55 tags.append(tags_type_0_item) 56 57 else: 58 tags = self.tags 59 60 download_method: str | Unset = UNSET 61 if not isinstance(self.download_method, Unset): 62 download_method = self.download_method.value 63 64 dbgap_key: None | str | Unset 65 if isinstance(self.dbgap_key, Unset): 66 dbgap_key = UNSET 67 else: 68 dbgap_key = self.dbgap_key 69 70 field_dict: dict[str, Any] = {} 71 field_dict.update(self.additional_properties) 72 field_dict.update( 73 { 74 "name": name, 75 "publicIds": public_ids, 76 } 77 ) 78 if description is not UNSET: 79 field_dict["description"] = description 80 if tags is not UNSET: 81 field_dict["tags"] = tags 82 if download_method is not UNSET: 83 field_dict["downloadMethod"] = download_method 84 if dbgap_key is not UNSET: 85 field_dict["dbgapKey"] = dbgap_key 86 87 return field_dict
89 @classmethod 90 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 91 from ..models.tag import Tag 92 93 d = dict(src_dict) 94 name = d.pop("name") 95 96 public_ids = cast(list[str], d.pop("publicIds")) 97 98 description = d.pop("description", UNSET) 99 100 def _parse_tags(data: object) -> list[Tag] | None | Unset: 101 if data is None: 102 return data 103 if isinstance(data, Unset): 104 return data 105 try: 106 if not isinstance(data, list): 107 raise TypeError() 108 tags_type_0 = [] 109 _tags_type_0 = data 110 for tags_type_0_item_data in _tags_type_0: 111 tags_type_0_item = Tag.from_dict(tags_type_0_item_data) 112 113 tags_type_0.append(tags_type_0_item) 114 115 return tags_type_0 116 except (TypeError, ValueError, AttributeError, KeyError): 117 pass 118 return cast(list[Tag] | None | Unset, data) 119 120 tags = _parse_tags(d.pop("tags", UNSET)) 121 122 _download_method = d.pop("downloadMethod", UNSET) 123 download_method: ImportDataRequestDownloadMethod | Unset 124 if isinstance(_download_method, Unset): 125 download_method = UNSET 126 else: 127 download_method = ImportDataRequestDownloadMethod(_download_method) 128 129 def _parse_dbgap_key(data: object) -> None | str | Unset: 130 if data is None: 131 return data 132 if isinstance(data, Unset): 133 return data 134 return cast(None | str | Unset, data) 135 136 dbgap_key = _parse_dbgap_key(d.pop("dbgapKey", UNSET)) 137 138 import_data_request = cls( 139 name=name, 140 public_ids=public_ids, 141 description=description, 142 tags=tags, 143 download_method=download_method, 144 dbgap_key=dbgap_key, 145 ) 146 147 import_data_request.additional_properties = d 148 return import_data_request
class ImportDataRequestDownloadMethod(str, Enum):
    """Method used to download FastQ files for an import request."""

    ASPERA = "aspera"
    FTP = "ftp"
    SRATOOLS = "sratools"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Render as the raw wire value, not the default "ImportDataRequestDownloadMethod.FTP".
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Any unrecognized wire value maps to UNKNOWN instead of raising ValueError.
        # (Return the member directly; the original cls(cls.UNKNOWN) round-trip was redundant.)
        return cls.UNKNOWN
Returns the enum member's raw string value (e.g. "sratools") rather than the default "ImportDataRequestDownloadMethod.SRATOOLS" representation, so the member renders as its wire value.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class InviteUserRequest:
    """
    Attributes:
        name (str):
        organization (str):
        email (str):
    """

    name: str
    organization: str
    email: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        # Extra properties first so the declared fields win on key collisions.
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["name"] = self.name
        serialized["organization"] = self.organization
        serialized["email"] = self.email
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        payload = dict(src_dict)
        request = cls(
            name=payload.pop("name"),
            organization=payload.pop("organization"),
            email=payload.pop("email"),
        )
        # Whatever was not consumed above is an undeclared extra property.
        request.additional_properties = payload
        return request

    @property
    def additional_keys(self) -> list[str]:
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- organization (str):
- email (str):
26def __init__(self, name, organization, email): 27 self.name = name 28 self.organization = organization 29 self.email = email 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class InviteUserRequest.
27 def to_dict(self) -> dict[str, Any]: 28 name = self.name 29 30 organization = self.organization 31 32 email = self.email 33 34 field_dict: dict[str, Any] = {} 35 field_dict.update(self.additional_properties) 36 field_dict.update( 37 { 38 "name": name, 39 "organization": organization, 40 "email": email, 41 } 42 ) 43 44 return field_dict
46 @classmethod 47 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 48 d = dict(src_dict) 49 name = d.pop("name") 50 51 organization = d.pop("organization") 52 53 email = d.pop("email") 54 55 invite_user_request = cls( 56 name=name, 57 organization=organization, 58 email=email, 59 ) 60 61 invite_user_request.additional_properties = d 62 return invite_user_request
@_attrs_define
class InviteUserResponse:
    """
    Attributes:
        message (str):
    """

    message: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        # Extra properties first so the declared field wins on a key collision.
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["message"] = self.message
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        payload = dict(src_dict)
        response = cls(message=payload.pop("message"))
        # Whatever was not consumed above is an undeclared extra property.
        response.additional_properties = payload
        return response

    @property
    def additional_keys(self) -> list[str]:
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- message (str):
24def __init__(self, message): 25 self.message = message 26 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class InviteUserResponse.
class ListEventsEntityType(str, Enum):
    """Entity type filter accepted by the list-events endpoint."""

    BILLINGACCOUNT = "BillingAccount"
    DATASET = "Dataset"
    NOTEBOOKINSTANCE = "NotebookInstance"
    PROCESS = "Process"
    PROJECT = "Project"
    SAMPLE = "Sample"
    USER = "User"
    USERPROJECTASSIGNMENT = "UserProjectAssignment"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Render as the raw wire value, not the default "ListEventsEntityType.DATASET".
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Any unrecognized wire value maps to UNKNOWN instead of raising ValueError.
        # (Return the member directly; the original cls(cls.UNKNOWN) round-trip was redundant.)
        return cls.UNKNOWN
Returns the enum member's raw string value (e.g. "Dataset") rather than the default "ListEventsEntityType.DATASET" representation, so the member renders as its wire value.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class LogEntry:
    """
    Attributes:
        message (str):
        timestamp (int | Unset): UNIX timestamp in milliseconds, might be blank if we don't have this info
    """

    message: str
    timestamp: int | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        # Extra properties first so the declared fields win on key collisions;
        # an UNSET timestamp is omitted from the output.
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["message"] = self.message
        if self.timestamp is not UNSET:
            serialized["timestamp"] = self.timestamp
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        payload = dict(src_dict)
        entry = cls(
            message=payload.pop("message"),
            timestamp=payload.pop("timestamp", UNSET),
        )
        # Whatever was not consumed above is an undeclared extra property.
        entry.additional_properties = payload
        return entry

    @property
    def additional_keys(self) -> list[str]:
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- message (str):
- timestamp (int | Unset): UNIX timestamp in milliseconds, might be blank if we don't have this info
25def __init__(self, message, timestamp=attr_dict['timestamp'].default): 26 self.message = message 27 self.timestamp = timestamp 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class LogEntry.
27 def to_dict(self) -> dict[str, Any]: 28 message = self.message 29 30 timestamp = self.timestamp 31 32 field_dict: dict[str, Any] = {} 33 field_dict.update(self.additional_properties) 34 field_dict.update( 35 { 36 "message": message, 37 } 38 ) 39 if timestamp is not UNSET: 40 field_dict["timestamp"] = timestamp 41 42 return field_dict
44 @classmethod 45 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 46 d = dict(src_dict) 47 message = d.pop("message") 48 49 timestamp = d.pop("timestamp", UNSET) 50 51 log_entry = cls( 52 message=message, 53 timestamp=timestamp, 54 ) 55 56 log_entry.additional_properties = d 57 return log_entry
@_attrs_define
class LoginProvider:
    """
    Attributes:
        id (str):
        name (str):
        description (str):
        logo_url (str):
    """

    id: str
    name: str
    description: str
    logo_url: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        # Extra properties first so the declared fields win on key collisions.
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["id"] = self.id
        serialized["name"] = self.name
        serialized["description"] = self.description
        serialized["logoUrl"] = self.logo_url
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        payload = dict(src_dict)
        provider = cls(
            id=payload.pop("id"),
            name=payload.pop("name"),
            description=payload.pop("description"),
            logo_url=payload.pop("logoUrl"),
        )
        # Whatever was not consumed above is an undeclared extra property.
        provider.additional_properties = payload
        return provider

    @property
    def additional_keys(self) -> list[str]:
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- description (str):
- logo_url (str):
27def __init__(self, id, name, description, logo_url): 28 self.id = id 29 self.name = name 30 self.description = description 31 self.logo_url = logo_url 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class LoginProvider.
29 def to_dict(self) -> dict[str, Any]: 30 id = self.id 31 32 name = self.name 33 34 description = self.description 35 36 logo_url = self.logo_url 37 38 field_dict: dict[str, Any] = {} 39 field_dict.update(self.additional_properties) 40 field_dict.update( 41 { 42 "id": id, 43 "name": name, 44 "description": description, 45 "logoUrl": logo_url, 46 } 47 ) 48 49 return field_dict
51 @classmethod 52 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 53 d = dict(src_dict) 54 id = d.pop("id") 55 56 name = d.pop("name") 57 58 description = d.pop("description") 59 60 logo_url = d.pop("logoUrl") 61 62 login_provider = cls( 63 id=id, 64 name=name, 65 description=description, 66 logo_url=logo_url, 67 ) 68 69 login_provider.additional_properties = d 70 return login_provider
@_attrs_define
class Message:
    """
    Attributes:
        message_type (MessageType):
        id (str):
        message (str):
        links (list[Entity]):
        has_replies (bool):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
        parent_message_id (None | str | Unset):
    """

    message_type: MessageType
    id: str
    message: str
    links: list[Entity]
    has_replies: bool
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    parent_message_id: None | str | Unset = UNSET
    # Holds any JSON keys not declared above so round-tripping is lossless.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        # Serialize to a JSON-ready dict using camelCase wire names.
        # Enum -> raw value, datetimes -> ISO-8601, links -> list of dicts.
        message_type = self.message_type.value

        id = self.id

        message = self.message

        links = []
        for links_item_data in self.links:
            links_item = links_item_data.to_dict()
            links.append(links_item)

        has_replies = self.has_replies

        created_by = self.created_by

        created_at = self.created_at.isoformat()

        updated_at = self.updated_at.isoformat()

        # parent_message_id: Unset is omitted below; None is serialized as null.
        parent_message_id: None | str | Unset
        if isinstance(self.parent_message_id, Unset):
            parent_message_id = UNSET
        else:
            parent_message_id = self.parent_message_id

        # Extra properties first so the declared fields win on key collisions.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "messageType": message_type,
                "id": id,
                "message": message,
                "links": links,
                "hasReplies": has_replies,
                "createdBy": created_by,
                "createdAt": created_at,
                "updatedAt": updated_at,
            }
        )
        if parent_message_id is not UNSET:
            field_dict["parentMessageId"] = parent_message_id

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        # Build a Message from a decoded JSON mapping. Keys are popped as
        # they are consumed; whatever remains is kept in additional_properties.
        from ..models.entity import Entity

        d = dict(src_dict)
        message_type = MessageType(d.pop("messageType"))

        id = d.pop("id")

        message = d.pop("message")

        links = []
        _links = d.pop("links")
        for links_item_data in _links:
            links_item = Entity.from_dict(links_item_data)

            links.append(links_item)

        has_replies = d.pop("hasReplies")

        created_by = d.pop("createdBy")

        created_at = isoparse(d.pop("createdAt"))

        updated_at = isoparse(d.pop("updatedAt"))

        def _parse_parent_message_id(data: object) -> None | str | Unset:
            # None and Unset pass through; anything else is treated as str.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        parent_message_id = _parse_parent_message_id(d.pop("parentMessageId", UNSET))

        # NOTE: this rebinds the local `message` (previously the popped field
        # value) to the new instance; the field value was already passed in
        # via the keyword argument on the line below, so this is safe.
        message = cls(
            message_type=message_type,
            id=id,
            message=message,
            links=links,
            has_replies=has_replies,
            created_by=created_by,
            created_at=created_at,
            updated_at=updated_at,
            parent_message_id=parent_message_id,
        )

        message.additional_properties = d
        return message

    @property
    def additional_keys(self) -> list[str]:
        # Names of the undeclared extra properties.
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- message_type (MessageType):
- id (str):
- message (str):
- links (list[Entity]):
- has_replies (bool):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
- parent_message_id (None | str | Unset):
32def __init__(self, message_type, id, message, links, has_replies, created_by, created_at, updated_at, parent_message_id=attr_dict['parent_message_id'].default): 33 self.message_type = message_type 34 self.id = id 35 self.message = message 36 self.links = links 37 self.has_replies = has_replies 38 self.created_by = created_by 39 self.created_at = created_at 40 self.updated_at = updated_at 41 self.parent_message_id = parent_message_id 42 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Message.
48 def to_dict(self) -> dict[str, Any]: 49 message_type = self.message_type.value 50 51 id = self.id 52 53 message = self.message 54 55 links = [] 56 for links_item_data in self.links: 57 links_item = links_item_data.to_dict() 58 links.append(links_item) 59 60 has_replies = self.has_replies 61 62 created_by = self.created_by 63 64 created_at = self.created_at.isoformat() 65 66 updated_at = self.updated_at.isoformat() 67 68 parent_message_id: None | str | Unset 69 if isinstance(self.parent_message_id, Unset): 70 parent_message_id = UNSET 71 else: 72 parent_message_id = self.parent_message_id 73 74 field_dict: dict[str, Any] = {} 75 field_dict.update(self.additional_properties) 76 field_dict.update( 77 { 78 "messageType": message_type, 79 "id": id, 80 "message": message, 81 "links": links, 82 "hasReplies": has_replies, 83 "createdBy": created_by, 84 "createdAt": created_at, 85 "updatedAt": updated_at, 86 } 87 ) 88 if parent_message_id is not UNSET: 89 field_dict["parentMessageId"] = parent_message_id 90 91 return field_dict
93 @classmethod 94 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 95 from ..models.entity import Entity 96 97 d = dict(src_dict) 98 message_type = MessageType(d.pop("messageType")) 99 100 id = d.pop("id") 101 102 message = d.pop("message") 103 104 links = [] 105 _links = d.pop("links") 106 for links_item_data in _links: 107 links_item = Entity.from_dict(links_item_data) 108 109 links.append(links_item) 110 111 has_replies = d.pop("hasReplies") 112 113 created_by = d.pop("createdBy") 114 115 created_at = isoparse(d.pop("createdAt")) 116 117 updated_at = isoparse(d.pop("updatedAt")) 118 119 def _parse_parent_message_id(data: object) -> None | str | Unset: 120 if data is None: 121 return data 122 if isinstance(data, Unset): 123 return data 124 return cast(None | str | Unset, data) 125 126 parent_message_id = _parse_parent_message_id(d.pop("parentMessageId", UNSET)) 127 128 message = cls( 129 message_type=message_type, 130 id=id, 131 message=message, 132 links=links, 133 has_replies=has_replies, 134 created_by=created_by, 135 created_at=created_at, 136 updated_at=updated_at, 137 parent_message_id=parent_message_id, 138 ) 139 140 message.additional_properties = d 141 return message
@_attrs_define
class MessageInput:
    """
    Attributes:
        message (str):
        parent_message_id (None | str | Unset):
    """

    message: str
    parent_message_id: None | str | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        # Extra properties first so the declared fields win on key collisions;
        # an Unset parent_message_id is omitted, while None is serialized as null.
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["message"] = self.message
        if not isinstance(self.parent_message_id, Unset):
            serialized["parentMessageId"] = self.parent_message_id
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        payload = dict(src_dict)
        message = payload.pop("message")

        def _parse_parent_message_id(data: object) -> None | str | Unset:
            # None and Unset pass through; anything else is treated as str.
            if data is None or isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        instance = cls(
            message=message,
            parent_message_id=_parse_parent_message_id(payload.pop("parentMessageId", UNSET)),
        )
        # Whatever was not consumed above is an undeclared extra property.
        instance.additional_properties = payload
        return instance

    @property
    def additional_keys(self) -> list[str]:
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- message (str):
- parent_message_id (None | str | Unset):
# attrs-generated __init__ for MessageInput as rendered by the doc tool;
# attr_dict and the __attr_factory_* closure are supplied by attrs at class creation.
def __init__(self, message, parent_message_id=attr_dict['parent_message_id'].default):
    self.message = message
    self.parent_message_id = parent_message_id
    # additional_properties is init=False: always created fresh via its factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class MessageInput.
    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; parentMessageId is omitted when unset."""
        message = self.message

        parent_message_id: None | str | Unset
        if isinstance(self.parent_message_id, Unset):
            parent_message_id = UNSET
        else:
            parent_message_id = self.parent_message_id

        # Extra captured keys are written first so declared fields win on collision.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "message": message,
            }
        )
        if parent_message_id is not UNSET:
            field_dict["parentMessageId"] = parent_message_id

        return field_dict
    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a MessageInput from a raw API mapping; leftover keys go to additional_properties."""
        d = dict(src_dict)
        message = d.pop("message")

        def _parse_parent_message_id(data: object) -> None | str | Unset:
            # None and UNSET pass through unchanged; anything else is assumed to be a str.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        parent_message_id = _parse_parent_message_id(d.pop("parentMessageId", UNSET))

        message_input = cls(
            message=message,
            parent_message_id=parent_message_id,
        )

        message_input.additional_properties = d
        return message_input
class MessageType(str, Enum):
    """Kind of a message; UNKNOWN is a client-side fallback for unrecognized wire values."""

    SYSTEM = "SYSTEM"
    USER = "USER"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Render the raw wire value rather than the default enum repr-style string.
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Any unrecognized value maps onto the UNKNOWN fallback member.
        return cls.UNKNOWN
Return the underlying string value of the member (e.g. "USER"). (Note: the builtin `str` constructor docstring previously shown here was attached by the documentation generator because the enum mixes in `str`; it does not describe this method.)
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class MetricRecord:
    """A single metric datapoint.

    Attributes:
        unit (str):
        date (datetime.date | Unset): Date in ISO 8601 format
        services (MetricRecordServices | Unset): Map of service names to metric value Example: {'Amazon Simple Storage
            Service': 24.91}.
    """

    unit: str
    date: datetime.date | Unset = UNSET
    services: MetricRecordServices | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; unset optional fields are omitted."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["unit"] = self.unit
        if not isinstance(self.date, Unset):
            serialized["date"] = self.date.isoformat()
        if not isinstance(self.services, Unset):
            serialized["services"] = self.services.to_dict()
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a raw API mapping; leftover keys become additional_properties."""
        from ..models.metric_record_services import MetricRecordServices

        data = dict(src_dict)
        unit = data.pop("unit")

        raw_date = data.pop("date", UNSET)
        # Keep only the calendar-date portion of the ISO 8601 timestamp.
        date: datetime.date | Unset
        date = UNSET if isinstance(raw_date, Unset) else isoparse(raw_date).date()

        raw_services = data.pop("services", UNSET)
        services: MetricRecordServices | Unset
        services = (
            UNSET
            if isinstance(raw_services, Unset)
            else MetricRecordServices.from_dict(raw_services)
        )

        record = cls(unit=unit, date=date, services=services)
        record.additional_properties = data
        return record

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra keys captured from the payload."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- unit (str):
- date (datetime.date | Unset): Date in ISO 8601 format
- services (MetricRecordServices | Unset): Map of service names to metric value Example: {'Amazon Simple Storage Service': 24.91}.
# attrs-generated __init__ for MetricRecord as rendered by the doc tool;
# attr_dict and the __attr_factory_* closure are supplied by attrs at class creation.
def __init__(self, unit, date=attr_dict['date'].default, services=attr_dict['services'].default):
    self.unit = unit
    self.date = date
    self.services = services
    # additional_properties is init=False: always created fresh via its factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class MetricRecord.
    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; unset date/services are omitted."""
        unit = self.unit

        date: str | Unset = UNSET
        if not isinstance(self.date, Unset):
            date = self.date.isoformat()

        services: dict[str, Any] | Unset = UNSET
        if not isinstance(self.services, Unset):
            services = self.services.to_dict()

        # Extra captured keys are written first so declared fields win on collision.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "unit": unit,
            }
        )
        if date is not UNSET:
            field_dict["date"] = date
        if services is not UNSET:
            field_dict["services"] = services

        return field_dict
    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a MetricRecord from a raw API mapping; leftover keys go to additional_properties."""
        from ..models.metric_record_services import MetricRecordServices

        d = dict(src_dict)
        unit = d.pop("unit")

        _date = d.pop("date", UNSET)
        date: datetime.date | Unset
        if isinstance(_date, Unset):
            date = UNSET
        else:
            # Parse the ISO 8601 string and keep only the date portion.
            date = isoparse(_date).date()

        _services = d.pop("services", UNSET)
        services: MetricRecordServices | Unset
        if isinstance(_services, Unset):
            services = UNSET
        else:
            services = MetricRecordServices.from_dict(_services)

        metric_record = cls(
            unit=unit,
            date=date,
            services=services,
        )

        metric_record.additional_properties = d
        return metric_record
@_attrs_define
class MetricRecordServices:
    """Map of service names to metric value

    Example:
        {'Amazon Simple Storage Service': 24.91}

    """

    additional_properties: dict[str, float] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize as a plain dict copy of the service-name -> value mapping."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize: every key is treated as a service name."""
        mapping = cls()
        mapping.additional_properties = dict(src_dict)
        return mapping

    @property
    def additional_keys(self) -> list[str]:
        """Service names present in the mapping."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> float:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: float) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Map of service names to metric value
Example:
{'Amazon Simple Storage Service': 24.91}
@_attrs_define
class MountedDataset:
    """Represents a mounted dataset in a workspace

    Attributes:
        name (str): Folder name that appears in the workspace
        dataset_id (None | str | Unset): ID of the dataset to mount
        custom_uri (None | str | Unset): Full S3 URI to mounted data (if mounting custom path)
    """

    name: str
    dataset_id: None | str | Unset = UNSET
    custom_uri: None | str | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; unset optional fields are omitted."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["name"] = self.name
        if not isinstance(self.dataset_id, Unset):
            serialized["datasetId"] = self.dataset_id
        if not isinstance(self.custom_uri, Unset):
            serialized["customUri"] = self.custom_uri
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a raw API mapping; leftover keys become additional_properties."""
        data = dict(src_dict)
        name = data.pop("name")

        def _nullable_str(value: object) -> None | str | Unset:
            # None and UNSET pass through untouched; anything else is assumed to be a str.
            if value is None or isinstance(value, Unset):
                return value
            return cast(None | str | Unset, value)

        mounted = cls(
            name=name,
            dataset_id=_nullable_str(data.pop("datasetId", UNSET)),
            custom_uri=_nullable_str(data.pop("customUri", UNSET)),
        )
        mounted.additional_properties = data
        return mounted

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra keys captured from the payload."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Represents a mounted dataset in a workspace
Attributes:
- name (str): Folder name that appears in the workspace
- dataset_id (None | str | Unset): ID of the dataset to mount
- custom_uri (None | str | Unset): Full S3 URI to mounted data (if mounting custom path)
# attrs-generated __init__ for MountedDataset as rendered by the doc tool;
# attr_dict and the __attr_factory_* closure are supplied by attrs at class creation.
def __init__(self, name, dataset_id=attr_dict['dataset_id'].default, custom_uri=attr_dict['custom_uri'].default):
    self.name = name
    self.dataset_id = dataset_id
    self.custom_uri = custom_uri
    # additional_properties is init=False: always created fresh via its factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class MountedDataset.
    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; datasetId/customUri are omitted when unset."""
        name = self.name

        dataset_id: None | str | Unset
        if isinstance(self.dataset_id, Unset):
            dataset_id = UNSET
        else:
            dataset_id = self.dataset_id

        custom_uri: None | str | Unset
        if isinstance(self.custom_uri, Unset):
            custom_uri = UNSET
        else:
            custom_uri = self.custom_uri

        # Extra captured keys are written first so declared fields win on collision.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "name": name,
            }
        )
        if dataset_id is not UNSET:
            field_dict["datasetId"] = dataset_id
        if custom_uri is not UNSET:
            field_dict["customUri"] = custom_uri

        return field_dict
    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a MountedDataset from a raw API mapping; leftover keys go to additional_properties."""
        d = dict(src_dict)
        name = d.pop("name")

        def _parse_dataset_id(data: object) -> None | str | Unset:
            # None and UNSET pass through unchanged; anything else is assumed to be a str.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        dataset_id = _parse_dataset_id(d.pop("datasetId", UNSET))

        def _parse_custom_uri(data: object) -> None | str | Unset:
            # Same pass-through semantics as _parse_dataset_id.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        custom_uri = _parse_custom_uri(d.pop("customUri", UNSET))

        mounted_dataset = cls(
            name=name,
            dataset_id=dataset_id,
            custom_uri=custom_uri,
        )

        mounted_dataset.additional_properties = d
        return mounted_dataset
@_attrs_define
class MoveDatasetInput:
    """Request body for moving a dataset between projects.

    Attributes:
        dataset_id (str):
        source_project_id (str):
        target_project_id (str):
    """

    dataset_id: str
    source_project_id: str
    target_project_id: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "datasetId": self.dataset_id,
                "sourceProjectId": self.source_project_id,
                "targetProjectId": self.target_project_id,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a raw API mapping; leftover keys become additional_properties."""
        data = dict(src_dict)
        instance = cls(
            dataset_id=data.pop("datasetId"),
            source_project_id=data.pop("sourceProjectId"),
            target_project_id=data.pop("targetProjectId"),
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra keys captured from the payload."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- dataset_id (str):
- source_project_id (str):
- target_project_id (str):
# attrs-generated __init__ for MoveDatasetInput as rendered by the doc tool.
def __init__(self, dataset_id, source_project_id, target_project_id):
    self.dataset_id = dataset_id
    self.source_project_id = source_project_id
    self.target_project_id = target_project_id
    # additional_properties is init=False: always created fresh via its factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class MoveDatasetInput.
    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict with camelCase keys."""
        dataset_id = self.dataset_id

        source_project_id = self.source_project_id

        target_project_id = self.target_project_id

        # Extra captured keys are written first so declared fields win on collision.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "datasetId": dataset_id,
                "sourceProjectId": source_project_id,
                "targetProjectId": target_project_id,
            }
        )

        return field_dict
    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a MoveDatasetInput from a raw API mapping; leftover keys go to additional_properties."""
        d = dict(src_dict)
        dataset_id = d.pop("datasetId")

        source_project_id = d.pop("sourceProjectId")

        target_project_id = d.pop("targetProjectId")

        move_dataset_input = cls(
            dataset_id=dataset_id,
            source_project_id=source_project_id,
            target_project_id=target_project_id,
        )

        move_dataset_input.additional_properties = d
        return move_dataset_input
@_attrs_define
class MoveDatasetResponse:
    """Result of a dataset move request.

    Attributes:
        s_3_copy_command (str):
        s_3_delete_command (str):
        samples_not_moved (list[str]):
    """

    s_3_copy_command: str
    s_3_delete_command: str
    samples_not_moved: list[str]
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "s3CopyCommand": self.s_3_copy_command,
                "s3DeleteCommand": self.s_3_delete_command,
                "samplesNotMoved": self.samples_not_moved,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a raw API mapping; leftover keys become additional_properties."""
        data = dict(src_dict)
        response = cls(
            s_3_copy_command=data.pop("s3CopyCommand"),
            s_3_delete_command=data.pop("s3DeleteCommand"),
            samples_not_moved=cast(list[str], data.pop("samplesNotMoved")),
        )
        response.additional_properties = data
        return response

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra keys captured from the payload."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- s_3_copy_command (str):
- s_3_delete_command (str):
- samples_not_moved (list[str]):
# attrs-generated __init__ for MoveDatasetResponse as rendered by the doc tool.
def __init__(self, s_3_copy_command, s_3_delete_command, samples_not_moved):
    self.s_3_copy_command = s_3_copy_command
    self.s_3_delete_command = s_3_delete_command
    self.samples_not_moved = samples_not_moved
    # additional_properties is init=False: always created fresh via its factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class MoveDatasetResponse.
    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict with camelCase keys."""
        s_3_copy_command = self.s_3_copy_command

        s_3_delete_command = self.s_3_delete_command

        samples_not_moved = self.samples_not_moved

        # Extra captured keys are written first so declared fields win on collision.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "s3CopyCommand": s_3_copy_command,
                "s3DeleteCommand": s_3_delete_command,
                "samplesNotMoved": samples_not_moved,
            }
        )

        return field_dict
    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a MoveDatasetResponse from a raw API mapping; leftover keys go to additional_properties."""
        d = dict(src_dict)
        s_3_copy_command = d.pop("s3CopyCommand")

        s_3_delete_command = d.pop("s3DeleteCommand")

        samples_not_moved = cast(list[str], d.pop("samplesNotMoved"))

        move_dataset_response = cls(
            s_3_copy_command=s_3_copy_command,
            s_3_delete_command=s_3_delete_command,
            samples_not_moved=samples_not_moved,
        )

        move_dataset_response.additional_properties = d
        return move_dataset_response
@_attrs_define
class NamedItem:
    """A minimal id/name pair.

    Attributes:
        id (str):
        name (str):
    """

    id: str
    name: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update({"id": self.id, "name": self.name})
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a raw API mapping; leftover keys become additional_properties."""
        data = dict(src_dict)
        item = cls(id=data.pop("id"), name=data.pop("name"))
        item.additional_properties = data
        return item

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra keys captured from the payload."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
# attrs-generated __init__ for NamedItem as rendered by the doc tool.
def __init__(self, id, name):
    self.id = id
    self.name = name
    # additional_properties is init=False: always created fresh via its factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class NamedItem.
@_attrs_define
class NotebookInstance:
    """A provisioned notebook instance.

    Attributes:
        id (str):
        name (str):
        status (Status):
        status_message (str):
        instance_type (str):
        accelerator_types (list[str]):
        git_repositories (list[str]):
        volume_size_gb (int):
        is_shared_with_project (bool):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
    """

    id: str
    name: str
    status: Status
    status_message: str
    instance_type: str
    accelerator_types: list[str]
    git_repositories: list[str]
    volume_size_gb: int
    is_shared_with_project: bool
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; timestamps are rendered in ISO 8601."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "id": self.id,
                "name": self.name,
                "status": self.status.value,
                "statusMessage": self.status_message,
                "instanceType": self.instance_type,
                "acceleratorTypes": self.accelerator_types,
                "gitRepositories": self.git_repositories,
                "volumeSizeGB": self.volume_size_gb,
                "isSharedWithProject": self.is_shared_with_project,
                "createdBy": self.created_by,
                "createdAt": self.created_at.isoformat(),
                "updatedAt": self.updated_at.isoformat(),
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a raw API mapping; leftover keys become additional_properties."""
        data = dict(src_dict)
        instance = cls(
            id=data.pop("id"),
            name=data.pop("name"),
            status=Status(data.pop("status")),
            status_message=data.pop("statusMessage"),
            instance_type=data.pop("instanceType"),
            accelerator_types=cast(list[str], data.pop("acceleratorTypes")),
            git_repositories=cast(list[str], data.pop("gitRepositories")),
            volume_size_gb=data.pop("volumeSizeGB"),
            is_shared_with_project=data.pop("isSharedWithProject"),
            created_by=data.pop("createdBy"),
            created_at=isoparse(data.pop("createdAt")),
            updated_at=isoparse(data.pop("updatedAt")),
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra keys captured from the payload."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- status (Status):
- status_message (str):
- instance_type (str):
- accelerator_types (list[str]):
- git_repositories (list[str]):
- volume_size_gb (int):
- is_shared_with_project (bool):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
# attrs-generated __init__ for NotebookInstance as rendered by the doc tool.
def __init__(self, id, name, status, status_message, instance_type, accelerator_types, git_repositories, volume_size_gb, is_shared_with_project, created_by, created_at, updated_at):
    self.id = id
    self.name = name
    self.status = status
    self.status_message = status_message
    self.instance_type = instance_type
    self.accelerator_types = accelerator_types
    self.git_repositories = git_repositories
    self.volume_size_gb = volume_size_gb
    self.is_shared_with_project = is_shared_with_project
    self.created_by = created_by
    self.created_at = created_at
    self.updated_at = updated_at
    # additional_properties is init=False: always created fresh via its factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class NotebookInstance.
    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; timestamps are rendered in ISO 8601."""
        id = self.id

        name = self.name

        status = self.status.value

        status_message = self.status_message

        instance_type = self.instance_type

        accelerator_types = self.accelerator_types

        git_repositories = self.git_repositories

        volume_size_gb = self.volume_size_gb

        is_shared_with_project = self.is_shared_with_project

        created_by = self.created_by

        created_at = self.created_at.isoformat()

        updated_at = self.updated_at.isoformat()

        # Extra captured keys are written first so declared fields win on collision.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
                "status": status,
                "statusMessage": status_message,
                "instanceType": instance_type,
                "acceleratorTypes": accelerator_types,
                "gitRepositories": git_repositories,
                "volumeSizeGB": volume_size_gb,
                "isSharedWithProject": is_shared_with_project,
                "createdBy": created_by,
                "createdAt": created_at,
                "updatedAt": updated_at,
            }
        )

        return field_dict
    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a NotebookInstance from a raw API mapping; leftover keys go to additional_properties."""
        d = dict(src_dict)
        id = d.pop("id")

        name = d.pop("name")

        status = Status(d.pop("status"))

        status_message = d.pop("statusMessage")

        instance_type = d.pop("instanceType")

        accelerator_types = cast(list[str], d.pop("acceleratorTypes"))

        git_repositories = cast(list[str], d.pop("gitRepositories"))

        volume_size_gb = d.pop("volumeSizeGB")

        is_shared_with_project = d.pop("isSharedWithProject")

        created_by = d.pop("createdBy")

        created_at = isoparse(d.pop("createdAt"))

        updated_at = isoparse(d.pop("updatedAt"))

        notebook_instance = cls(
            id=id,
            name=name,
            status=status,
            status_message=status_message,
            instance_type=instance_type,
            accelerator_types=accelerator_types,
            git_repositories=git_repositories,
            volume_size_gb=volume_size_gb,
            is_shared_with_project=is_shared_with_project,
            created_by=created_by,
            created_at=created_at,
            updated_at=updated_at,
        )

        notebook_instance.additional_properties = d
        return notebook_instance
@_attrs_define
class NotebookInstanceStatusResponse:
    """Status report for a notebook instance.

    Attributes:
        status (str):
        status_message (str):
    """

    status: str
    status_message: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update({"status": self.status, "statusMessage": self.status_message})
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a raw API mapping; leftover keys become additional_properties."""
        data = dict(src_dict)
        response = cls(status=data.pop("status"), status_message=data.pop("statusMessage"))
        response.additional_properties = data
        return response

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra keys captured from the payload."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- status (str):
- status_message (str):
# attrs-generated __init__ for NotebookInstanceStatusResponse as rendered by the doc tool.
def __init__(self, status, status_message):
    self.status = status
    self.status_message = status_message
    # additional_properties is init=False: always created fresh via its factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class NotebookInstanceStatusResponse.
    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict with camelCase keys."""
        status = self.status

        status_message = self.status_message

        # Extra captured keys are written first so declared fields win on collision.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "status": status,
                "statusMessage": status_message,
            }
        )

        return field_dict
    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a NotebookInstanceStatusResponse from a raw API mapping."""
        d = dict(src_dict)
        status = d.pop("status")

        status_message = d.pop("statusMessage")

        notebook_instance_status_response = cls(
            status=status,
            status_message=status_message,
        )

        notebook_instance_status_response.additional_properties = d
        return notebook_instance_status_response
@_attrs_define
class OpenNotebookInstanceResponse:
    """Response returned when opening a notebook instance.

    Attributes:
        url (str):
        message (str):
    """

    url: str
    message: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update({"url": self.url, "message": self.message})
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a raw API mapping; leftover keys become additional_properties."""
        data = dict(src_dict)
        response = cls(url=data.pop("url"), message=data.pop("message"))
        response.additional_properties = data
        return response

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra keys captured from the payload."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- url (str):
- message (str):
# attrs-generated __init__ for OpenNotebookInstanceResponse as rendered by the doc tool.
def __init__(self, url, message):
    self.url = url
    self.message = message
    # additional_properties is init=False: always created fresh via its factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class OpenNotebookInstanceResponse.
    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an OpenNotebookInstanceResponse from a raw API mapping."""
        d = dict(src_dict)
        url = d.pop("url")

        message = d.pop("message")

        open_notebook_instance_response = cls(
            url=url,
            message=message,
        )

        open_notebook_instance_response.additional_properties = d
        return open_notebook_instance_response
@_attrs_define
class PaginatedResponseDatasetListDto:
    """One page of datasets plus the token for fetching the next page.

    Attributes:
        data (list[Dataset]):
        next_token (str):
    """

    data: list[Dataset]
    next_token: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; each dataset is serialized recursively."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "data": [item.to_dict() for item in self.data],
                "nextToken": self.next_token,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a raw API mapping; leftover keys become additional_properties."""
        from ..models.dataset import Dataset

        raw = dict(src_dict)
        page = cls(
            data=[Dataset.from_dict(item) for item in raw.pop("data")],
            next_token=raw.pop("nextToken"),
        )
        page.additional_properties = raw
        return page

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra keys captured from the payload."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- data (list[Dataset]): the Dataset records contained in this page of results
- next_token (str): token to pass back to the API to fetch the next page
25def __init__(self, data, next_token): 26 self.data = data 27 self.next_token = next_token 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PaginatedResponseDatasetListDto.
29 def to_dict(self) -> dict[str, Any]: 30 data = [] 31 for data_item_data in self.data: 32 data_item = data_item_data.to_dict() 33 data.append(data_item) 34 35 next_token = self.next_token 36 37 field_dict: dict[str, Any] = {} 38 field_dict.update(self.additional_properties) 39 field_dict.update( 40 { 41 "data": data, 42 "nextToken": next_token, 43 } 44 ) 45 46 return field_dict
48 @classmethod 49 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 50 from ..models.dataset import Dataset 51 52 d = dict(src_dict) 53 data = [] 54 _data = d.pop("data") 55 for data_item_data in _data: 56 data_item = Dataset.from_dict(data_item_data) 57 58 data.append(data_item) 59 60 next_token = d.pop("nextToken") 61 62 paginated_response_dataset_list_dto = cls( 63 data=data, 64 next_token=next_token, 65 ) 66 67 paginated_response_dataset_list_dto.additional_properties = d 68 return paginated_response_dataset_list_dto
@_attrs_define
class PaginatedResponseDiscussion:
    """One page of Discussion results plus a continuation token.

    Attributes:
        data (list[Discussion]):
        next_token (str):
    """

    data: list[Discussion]
    next_token: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict, keeping any extra properties."""
        # Known fields overwrite colliding keys from additional_properties.
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "data": [item.to_dict() for item in self.data],
                "nextToken": self.next_token,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping produced by the API."""
        from ..models.discussion import Discussion

        payload = dict(src_dict)
        page = cls(
            data=[Discussion.from_dict(raw) for raw in payload.pop("data")],
            next_token=payload.pop("nextToken"),
        )
        # Keys the model does not declare are kept for round-tripping.
        page.additional_properties = payload
        return page

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- data (list[Discussion]):
- next_token (str):
25def __init__(self, data, next_token): 26 self.data = data 27 self.next_token = next_token 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PaginatedResponseDiscussion.
29 def to_dict(self) -> dict[str, Any]: 30 data = [] 31 for data_item_data in self.data: 32 data_item = data_item_data.to_dict() 33 data.append(data_item) 34 35 next_token = self.next_token 36 37 field_dict: dict[str, Any] = {} 38 field_dict.update(self.additional_properties) 39 field_dict.update( 40 { 41 "data": data, 42 "nextToken": next_token, 43 } 44 ) 45 46 return field_dict
48 @classmethod 49 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 50 from ..models.discussion import Discussion 51 52 d = dict(src_dict) 53 data = [] 54 _data = d.pop("data") 55 for data_item_data in _data: 56 data_item = Discussion.from_dict(data_item_data) 57 58 data.append(data_item) 59 60 next_token = d.pop("nextToken") 61 62 paginated_response_discussion = cls( 63 data=data, 64 next_token=next_token, 65 ) 66 67 paginated_response_discussion.additional_properties = d 68 return paginated_response_discussion
@_attrs_define
class PaginatedResponseMessage:
    """One page of Message results plus a continuation token.

    Attributes:
        data (list[Message]):
        next_token (str):
    """

    data: list[Message]
    next_token: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict, keeping any extra properties."""
        # Known fields overwrite colliding keys from additional_properties.
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "data": [item.to_dict() for item in self.data],
                "nextToken": self.next_token,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping produced by the API."""
        from ..models.message import Message

        payload = dict(src_dict)
        page = cls(
            data=[Message.from_dict(raw) for raw in payload.pop("data")],
            next_token=payload.pop("nextToken"),
        )
        # Keys the model does not declare are kept for round-tripping.
        page.additional_properties = payload
        return page

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- data (list[Message]):
- next_token (str):
25def __init__(self, data, next_token): 26 self.data = data 27 self.next_token = next_token 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PaginatedResponseMessage.
29 def to_dict(self) -> dict[str, Any]: 30 data = [] 31 for data_item_data in self.data: 32 data_item = data_item_data.to_dict() 33 data.append(data_item) 34 35 next_token = self.next_token 36 37 field_dict: dict[str, Any] = {} 38 field_dict.update(self.additional_properties) 39 field_dict.update( 40 { 41 "data": data, 42 "nextToken": next_token, 43 } 44 ) 45 46 return field_dict
48 @classmethod 49 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 50 from ..models.message import Message 51 52 d = dict(src_dict) 53 data = [] 54 _data = d.pop("data") 55 for data_item_data in _data: 56 data_item = Message.from_dict(data_item_data) 57 58 data.append(data_item) 59 60 next_token = d.pop("nextToken") 61 62 paginated_response_message = cls( 63 data=data, 64 next_token=next_token, 65 ) 66 67 paginated_response_message.additional_properties = d 68 return paginated_response_message
@_attrs_define
class PaginatedResponseSampleDto:
    """One page of Sample results plus a continuation token.

    Attributes:
        data (list[Sample]):
        next_token (str):
    """

    data: list[Sample]
    next_token: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict, keeping any extra properties."""
        # Known fields overwrite colliding keys from additional_properties.
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "data": [item.to_dict() for item in self.data],
                "nextToken": self.next_token,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping produced by the API."""
        from ..models.sample import Sample

        payload = dict(src_dict)
        page = cls(
            data=[Sample.from_dict(raw) for raw in payload.pop("data")],
            next_token=payload.pop("nextToken"),
        )
        # Keys the model does not declare are kept for round-tripping.
        page.additional_properties = payload
        return page

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- data (list[Sample]):
- next_token (str):
25def __init__(self, data, next_token): 26 self.data = data 27 self.next_token = next_token 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PaginatedResponseSampleDto.
29 def to_dict(self) -> dict[str, Any]: 30 data = [] 31 for data_item_data in self.data: 32 data_item = data_item_data.to_dict() 33 data.append(data_item) 34 35 next_token = self.next_token 36 37 field_dict: dict[str, Any] = {} 38 field_dict.update(self.additional_properties) 39 field_dict.update( 40 { 41 "data": data, 42 "nextToken": next_token, 43 } 44 ) 45 46 return field_dict
48 @classmethod 49 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 50 from ..models.sample import Sample 51 52 d = dict(src_dict) 53 data = [] 54 _data = d.pop("data") 55 for data_item_data in _data: 56 data_item = Sample.from_dict(data_item_data) 57 58 data.append(data_item) 59 60 next_token = d.pop("nextToken") 61 62 paginated_response_sample_dto = cls( 63 data=data, 64 next_token=next_token, 65 ) 66 67 paginated_response_sample_dto.additional_properties = d 68 return paginated_response_sample_dto
@_attrs_define
class PaginatedResponseUserDto:
    """One page of User results plus a continuation token.

    Attributes:
        data (list[User]):
        next_token (str):
    """

    data: list[User]
    next_token: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict, keeping any extra properties."""
        # Known fields overwrite colliding keys from additional_properties.
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "data": [item.to_dict() for item in self.data],
                "nextToken": self.next_token,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping produced by the API."""
        from ..models.user import User

        payload = dict(src_dict)
        page = cls(
            data=[User.from_dict(raw) for raw in payload.pop("data")],
            next_token=payload.pop("nextToken"),
        )
        # Keys the model does not declare are kept for round-tripping.
        page.additional_properties = payload
        return page

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- data (list[User]):
- next_token (str):
25def __init__(self, data, next_token): 26 self.data = data 27 self.next_token = next_token 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PaginatedResponseUserDto.
29 def to_dict(self) -> dict[str, Any]: 30 data = [] 31 for data_item_data in self.data: 32 data_item = data_item_data.to_dict() 33 data.append(data_item) 34 35 next_token = self.next_token 36 37 field_dict: dict[str, Any] = {} 38 field_dict.update(self.additional_properties) 39 field_dict.update( 40 { 41 "data": data, 42 "nextToken": next_token, 43 } 44 ) 45 46 return field_dict
48 @classmethod 49 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 50 from ..models.user import User 51 52 d = dict(src_dict) 53 data = [] 54 _data = d.pop("data") 55 for data_item_data in _data: 56 data_item = User.from_dict(data_item_data) 57 58 data.append(data_item) 59 60 next_token = d.pop("nextToken") 61 62 paginated_response_user_dto = cls( 63 data=data, 64 next_token=next_token, 65 ) 66 67 paginated_response_user_dto.additional_properties = d 68 return paginated_response_user_dto
@_attrs_define
class PipelineCode:
    """Used to describe the pipeline analysis code, not required for ingest processes.

    Attributes:
        repository_path (str): GitHub repository which contains the workflow code Example: nf-core/rnaseq.
        version (str): Branch, tag, or commit hash of the pipeline code Example: main.
        repository_type (RepositoryType): Type of repository
        entry_point (str): Main script for running the pipeline Example: main.nf.
    """

    repository_path: str
    version: str
    repository_type: RepositoryType
    entry_point: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict, keeping any extra properties."""
        # Known fields overwrite colliding keys from additional_properties.
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "repositoryPath": self.repository_path,
                "version": self.version,
                "repositoryType": self.repository_type.value,
                "entryPoint": self.entry_point,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping produced by the API."""
        payload = dict(src_dict)
        # Kwargs evaluate left-to-right, preserving the original pop order.
        pipeline_code = cls(
            repository_path=payload.pop("repositoryPath"),
            version=payload.pop("version"),
            repository_type=RepositoryType(payload.pop("repositoryType")),
            entry_point=payload.pop("entryPoint"),
        )
        # Keys the model does not declare are kept for round-tripping.
        pipeline_code.additional_properties = payload
        return pipeline_code

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Used to describe the pipeline analysis code, not required for ingest processes
Attributes:
- repository_path (str): GitHub repository which contains the workflow code Example: nf-core/rnaseq.
- version (str): Branch, tag, or commit hash of the pipeline code Example: main.
- repository_type (RepositoryType): Type of repository
- entry_point (str): Main script for running the pipeline Example: main.nf.
27def __init__(self, repository_path, version, repository_type, entry_point): 28 self.repository_path = repository_path 29 self.version = version 30 self.repository_type = repository_type 31 self.entry_point = entry_point 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PipelineCode.
32 def to_dict(self) -> dict[str, Any]: 33 repository_path = self.repository_path 34 35 version = self.version 36 37 repository_type = self.repository_type.value 38 39 entry_point = self.entry_point 40 41 field_dict: dict[str, Any] = {} 42 field_dict.update(self.additional_properties) 43 field_dict.update( 44 { 45 "repositoryPath": repository_path, 46 "version": version, 47 "repositoryType": repository_type, 48 "entryPoint": entry_point, 49 } 50 ) 51 52 return field_dict
54 @classmethod 55 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 56 d = dict(src_dict) 57 repository_path = d.pop("repositoryPath") 58 59 version = d.pop("version") 60 61 repository_type = RepositoryType(d.pop("repositoryType")) 62 63 entry_point = d.pop("entryPoint") 64 65 pipeline_code = cls( 66 repository_path=repository_path, 67 version=version, 68 repository_type=repository_type, 69 entry_point=entry_point, 70 ) 71 72 pipeline_code.additional_properties = d 73 return pipeline_code
@_attrs_define
class PipelineCost:
    """Cost of a pipeline run, possibly an estimate.

    Attributes:
        total_cost (float | None | Unset): The total cost of running the pipeline
        is_estimate (bool | Unset): Is this an estimate of the cost?
        description (str | Unset): Description of the cost calculation
    """

    total_cost: float | None | Unset = UNSET
    is_estimate: bool | Unset = UNSET
    description: str | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict; UNSET fields are omitted entirely."""
        # None is a legal wire value for totalCost; only UNSET means "omit".
        total_cost = UNSET if isinstance(self.total_cost, Unset) else self.total_cost

        serialized: dict[str, Any] = dict(self.additional_properties)
        if total_cost is not UNSET:
            serialized["totalCost"] = total_cost
        if self.is_estimate is not UNSET:
            serialized["isEstimate"] = self.is_estimate
        if self.description is not UNSET:
            serialized["description"] = self.description
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping produced by the API."""
        payload = dict(src_dict)

        def _parse_total_cost(value: object) -> float | None | Unset:
            # Preserve None and UNSET untouched; anything else is the cost value.
            if value is None:
                return value
            if isinstance(value, Unset):
                return value
            return cast(float | None | Unset, value)

        # Kwargs evaluate left-to-right, preserving the original pop order.
        pipeline_cost = cls(
            total_cost=_parse_total_cost(payload.pop("totalCost", UNSET)),
            is_estimate=payload.pop("isEstimate", UNSET),
            description=payload.pop("description", UNSET),
        )
        # Keys the model does not declare are kept for round-tripping.
        pipeline_cost.additional_properties = payload
        return pipeline_cost

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- total_cost (float | None | Unset): The total cost of running the pipeline
- is_estimate (bool | Unset): Is this an estimate of the cost?
- description (str | Unset): Description of the cost calculation
26def __init__(self, total_cost=attr_dict['total_cost'].default, is_estimate=attr_dict['is_estimate'].default, description=attr_dict['description'].default): 27 self.total_cost = total_cost 28 self.is_estimate = is_estimate 29 self.description = description 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PipelineCost.
29 def to_dict(self) -> dict[str, Any]: 30 total_cost: float | None | Unset 31 if isinstance(self.total_cost, Unset): 32 total_cost = UNSET 33 else: 34 total_cost = self.total_cost 35 36 is_estimate = self.is_estimate 37 38 description = self.description 39 40 field_dict: dict[str, Any] = {} 41 field_dict.update(self.additional_properties) 42 field_dict.update({}) 43 if total_cost is not UNSET: 44 field_dict["totalCost"] = total_cost 45 if is_estimate is not UNSET: 46 field_dict["isEstimate"] = is_estimate 47 if description is not UNSET: 48 field_dict["description"] = description 49 50 return field_dict
52 @classmethod 53 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 54 d = dict(src_dict) 55 56 def _parse_total_cost(data: object) -> float | None | Unset: 57 if data is None: 58 return data 59 if isinstance(data, Unset): 60 return data 61 return cast(float | None | Unset, data) 62 63 total_cost = _parse_total_cost(d.pop("totalCost", UNSET)) 64 65 is_estimate = d.pop("isEstimate", UNSET) 66 67 description = d.pop("description", UNSET) 68 69 pipeline_cost = cls( 70 total_cost=total_cost, 71 is_estimate=is_estimate, 72 description=description, 73 ) 74 75 pipeline_cost.additional_properties = d 76 return pipeline_cost
@_attrs_define
class PortalErrorResponse:
    """Structured error payload returned by the API.

    Attributes:
        status_code (int):
        error_code (str):
        error_detail (str):
        errors (list[ErrorMessage]):
    """

    status_code: int
    error_code: str
    error_detail: str
    errors: list[ErrorMessage]
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict, keeping any extra properties."""
        # Known fields overwrite colliding keys from additional_properties.
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "statusCode": self.status_code,
                "errorCode": self.error_code,
                "errorDetail": self.error_detail,
                "errors": [entry.to_dict() for entry in self.errors],
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping produced by the API."""
        from ..models.error_message import ErrorMessage

        payload = dict(src_dict)
        # Kwargs evaluate left-to-right, preserving the original pop order.
        response = cls(
            status_code=payload.pop("statusCode"),
            error_code=payload.pop("errorCode"),
            error_detail=payload.pop("errorDetail"),
            errors=[ErrorMessage.from_dict(raw) for raw in payload.pop("errors")],
        )
        # Keys the model does not declare are kept for round-tripping.
        response.additional_properties = payload
        return response

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- status_code (int): HTTP status code of the error response
- error_code (str): short code identifying the kind of error
- error_detail (str): detailed description of the error
- errors (list[ErrorMessage]): the individual error messages
27def __init__(self, status_code, error_code, error_detail, errors): 28 self.status_code = status_code 29 self.error_code = error_code 30 self.error_detail = error_detail 31 self.errors = errors 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PortalErrorResponse.
33 def to_dict(self) -> dict[str, Any]: 34 status_code = self.status_code 35 36 error_code = self.error_code 37 38 error_detail = self.error_detail 39 40 errors = [] 41 for errors_item_data in self.errors: 42 errors_item = errors_item_data.to_dict() 43 errors.append(errors_item) 44 45 field_dict: dict[str, Any] = {} 46 field_dict.update(self.additional_properties) 47 field_dict.update( 48 { 49 "statusCode": status_code, 50 "errorCode": error_code, 51 "errorDetail": error_detail, 52 "errors": errors, 53 } 54 ) 55 56 return field_dict
58 @classmethod 59 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 60 from ..models.error_message import ErrorMessage 61 62 d = dict(src_dict) 63 status_code = d.pop("statusCode") 64 65 error_code = d.pop("errorCode") 66 67 error_detail = d.pop("errorDetail") 68 69 errors = [] 70 _errors = d.pop("errors") 71 for errors_item_data in _errors: 72 errors_item = ErrorMessage.from_dict(errors_item_data) 73 74 errors.append(errors_item) 75 76 portal_error_response = cls( 77 status_code=status_code, 78 error_code=error_code, 79 error_detail=error_detail, 80 errors=errors, 81 ) 82 83 portal_error_response.additional_properties = d 84 return portal_error_response
@_attrs_define
class PostponeWorkspaceAutostopInput:
    """Request body for pushing back a workspace's automatic-stop deadline.

    Attributes:
        auto_stop_timeout (int | Unset): Time period (in hours) to automatically stop the workspace if running
    """

    auto_stop_timeout: int | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict; the optional field is omitted when UNSET."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        if self.auto_stop_timeout is not UNSET:
            serialized["autoStopTimeout"] = self.auto_stop_timeout
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping produced by the API."""
        payload = dict(src_dict)
        instance = cls(auto_stop_timeout=payload.pop("autoStopTimeout", UNSET))
        # Keys the model does not declare are kept for round-tripping.
        instance.additional_properties = payload
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- auto_stop_timeout (int | Unset): Time period (in hours) to automatically stop the workspace if running
24def __init__(self, auto_stop_timeout=attr_dict['auto_stop_timeout'].default): 25 self.auto_stop_timeout = auto_stop_timeout 26 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PostponeWorkspaceAutostopInput.
25 def to_dict(self) -> dict[str, Any]: 26 auto_stop_timeout = self.auto_stop_timeout 27 28 field_dict: dict[str, Any] = {} 29 field_dict.update(self.additional_properties) 30 field_dict.update({}) 31 if auto_stop_timeout is not UNSET: 32 field_dict["autoStopTimeout"] = auto_stop_timeout 33 34 return field_dict
36 @classmethod 37 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 38 d = dict(src_dict) 39 auto_stop_timeout = d.pop("autoStopTimeout", UNSET) 40 41 postpone_workspace_autostop_input = cls( 42 auto_stop_timeout=auto_stop_timeout, 43 ) 44 45 postpone_workspace_autostop_input.additional_properties = d 46 return postpone_workspace_autostop_input
@_attrs_define
class Process:
    """Identifies a data type or pipeline in Cirro.

    Attributes:
        id (str): Unique ID of the Process Example: process-hutch-magic_flute-1_0.
        name (str): Friendly name for the process Example: MAGeCK Flute.
        description (str): Description of the process Example: MAGeCK Flute enables accurate identification of
            essential genes with their related biological functions.
        data_type (str): Name of the data type this pipeline produces (if it is not defined, use the name)
        executor (Executor): How the workflow is executed
        child_process_ids (list[str]): IDs of pipelines that can be run downstream
        parent_process_ids (list[str]): IDs of processes that can run this pipeline
        linked_project_ids (list[str]): Projects that can run this process
        is_tenant_wide (bool): Whether the process is shared with the tenant
        allow_multiple_sources (bool): Whether the pipeline is allowed to have multiple dataset sources
        uses_sample_sheet (bool): Whether the pipeline uses the Cirro-provided sample sheet
        is_archived (bool): Whether the process is marked as archived
        category (str | Unset): Category of the process Example: Microbial Analysis.
        pipeline_type (str | Unset): Type of pipeline Example: nf-core.
        documentation_url (str | Unset): Link to process documentation Example:
            https://docs.cirro.bio/pipelines/catalog_targeted_sequencing/#crispr-screen-analysis.
        file_requirements_message (str | Unset): Description of the files to be uploaded (optional)
        owner (None | str | Unset): Username of the pipeline creator (blank if Cirro curated)
        created_at (datetime.datetime | Unset): When the process was created (does not reflect the pipeline code)
        updated_at (datetime.datetime | Unset): When the process was updated (does not reflect the pipeline code)
    """

    # Required fields (always present on the wire).
    id: str
    name: str
    description: str
    data_type: str
    executor: Executor
    child_process_ids: list[str]
    parent_process_ids: list[str]
    linked_project_ids: list[str]
    is_tenant_wide: bool
    allow_multiple_sources: bool
    uses_sample_sheet: bool
    is_archived: bool
    # Optional fields default to UNSET so to_dict can omit them entirely.
    category: str | Unset = UNSET
    pipeline_type: str | Unset = UNSET
    documentation_url: str | Unset = UNSET
    file_requirements_message: str | Unset = UNSET
    owner: None | str | Unset = UNSET
    created_at: datetime.datetime | Unset = UNSET
    updated_at: datetime.datetime | Unset = UNSET
    # Catch-all for wire properties not declared in the model.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; UNSET optional fields are omitted."""
        id = self.id
        name = self.name
        description = self.description
        data_type = self.data_type
        # Enum is serialized by its value.
        executor = self.executor.value
        child_process_ids = self.child_process_ids
        parent_process_ids = self.parent_process_ids
        linked_project_ids = self.linked_project_ids
        is_tenant_wide = self.is_tenant_wide
        allow_multiple_sources = self.allow_multiple_sources
        uses_sample_sheet = self.uses_sample_sheet
        is_archived = self.is_archived
        category = self.category
        pipeline_type = self.pipeline_type
        documentation_url = self.documentation_url
        file_requirements_message = self.file_requirements_message
        # owner: None is a legal wire value; only UNSET suppresses the key below.
        owner: None | str | Unset
        if isinstance(self.owner, Unset):
            owner = UNSET
        else:
            owner = self.owner
        # Datetimes are emitted as ISO-8601 strings.
        created_at: str | Unset = UNSET
        if not isinstance(self.created_at, Unset):
            created_at = self.created_at.isoformat()
        updated_at: str | Unset = UNSET
        if not isinstance(self.updated_at, Unset):
            updated_at = self.updated_at.isoformat()

        field_dict: dict[str, Any] = {}
        # Known fields overwrite colliding additional_properties keys.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
                "description": description,
                "dataType": data_type,
                "executor": executor,
                "childProcessIds": child_process_ids,
                "parentProcessIds": parent_process_ids,
                "linkedProjectIds": linked_project_ids,
                "isTenantWide": is_tenant_wide,
                "allowMultipleSources": allow_multiple_sources,
                "usesSampleSheet": uses_sample_sheet,
                "isArchived": is_archived,
            }
        )
        if category is not UNSET:
            field_dict["category"] = category
        if pipeline_type is not UNSET:
            field_dict["pipelineType"] = pipeline_type
        if documentation_url is not UNSET:
            field_dict["documentationUrl"] = documentation_url
        if file_requirements_message is not UNSET:
            field_dict["fileRequirementsMessage"] = file_requirements_message
        if owner is not UNSET:
            field_dict["owner"] = owner
        if created_at is not UNSET:
            field_dict["createdAt"] = created_at
        if updated_at is not UNSET:
            field_dict["updatedAt"] = updated_at

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; unknown keys land in additional_properties."""
        d = dict(src_dict)
        id = d.pop("id")
        name = d.pop("name")
        description = d.pop("description")
        data_type = d.pop("dataType")
        executor = Executor(d.pop("executor"))
        child_process_ids = cast(list[str], d.pop("childProcessIds"))
        parent_process_ids = cast(list[str], d.pop("parentProcessIds"))
        linked_project_ids = cast(list[str], d.pop("linkedProjectIds"))
        is_tenant_wide = d.pop("isTenantWide")
        allow_multiple_sources = d.pop("allowMultipleSources")
        uses_sample_sheet = d.pop("usesSampleSheet")
        is_archived = d.pop("isArchived")
        category = d.pop("category", UNSET)
        pipeline_type = d.pop("pipelineType", UNSET)
        documentation_url = d.pop("documentationUrl", UNSET)
        file_requirements_message = d.pop("fileRequirementsMessage", UNSET)

        def _parse_owner(data: object) -> None | str | Unset:
            # Pass None / UNSET through unchanged; anything else is the username.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        owner = _parse_owner(d.pop("owner", UNSET))

        # Timestamps arrive as ISO-8601 strings; parse only when present.
        _created_at = d.pop("createdAt", UNSET)
        created_at: datetime.datetime | Unset
        if isinstance(_created_at, Unset):
            created_at = UNSET
        else:
            created_at = isoparse(_created_at)

        _updated_at = d.pop("updatedAt", UNSET)
        updated_at: datetime.datetime | Unset
        if isinstance(_updated_at, Unset):
            updated_at = UNSET
        else:
            updated_at = isoparse(_updated_at)

        process = cls(
            id=id,
            name=name,
            description=description,
            data_type=data_type,
            executor=executor,
            child_process_ids=child_process_ids,
            parent_process_ids=parent_process_ids,
            linked_project_ids=linked_project_ids,
            is_tenant_wide=is_tenant_wide,
            allow_multiple_sources=allow_multiple_sources,
            uses_sample_sheet=uses_sample_sheet,
            is_archived=is_archived,
            category=category,
            pipeline_type=pipeline_type,
            documentation_url=documentation_url,
            file_requirements_message=file_requirements_message,
            owner=owner,
            created_at=created_at,
            updated_at=updated_at,
        )

        # Whatever keys remain were not modeled; keep them for round-tripping.
        process.additional_properties = d
        return process

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Identifies a data type or pipeline in Cirro
Attributes:
- id (str): Unique ID of the Process Example: process-hutch-magic_flute-1_0.
- name (str): Friendly name for the process Example: MAGeCK Flute.
- description (str): Description of the process Example: MAGeCK Flute enables accurate identification of essential genes with their related biological functions.
- data_type (str): Name of the data type this pipeline produces (if it is not defined, use the name)
- executor (Executor): How the workflow is executed
- child_process_ids (list[str]): IDs of pipelines that can be run downstream
- parent_process_ids (list[str]): IDs of processes that can run this pipeline
- linked_project_ids (list[str]): Projects that can run this process
- is_tenant_wide (bool): Whether the process is shared with the tenant
- allow_multiple_sources (bool): Whether the pipeline is allowed to have multiple dataset sources
- uses_sample_sheet (bool): Whether the pipeline uses the Cirro-provided sample sheet
- is_archived (bool): Whether the process is marked as archived
- category (str | Unset): Category of the process Example: Microbial Analysis.
- pipeline_type (str | Unset): Type of pipeline Example: nf-core.
- documentation_url (str | Unset): Link to process documentation Example: https://docs.cirro.bio/pipelines/catalog_targeted_sequencing/#crispr-screen-analysis.
- file_requirements_message (str | Unset): Description of the files to be uploaded (optional)
- owner (None | str | Unset): Username of the pipeline creator (blank if Cirro curated)
- created_at (datetime.datetime | Unset): When the process was created (does not reflect the pipeline code)
- updated_at (datetime.datetime | Unset): When the process was updated (does not reflect the pipeline code)
42def __init__(self, id, name, description, data_type, executor, child_process_ids, parent_process_ids, linked_project_ids, is_tenant_wide, allow_multiple_sources, uses_sample_sheet, is_archived, category=attr_dict['category'].default, pipeline_type=attr_dict['pipeline_type'].default, documentation_url=attr_dict['documentation_url'].default, file_requirements_message=attr_dict['file_requirements_message'].default, owner=attr_dict['owner'].default, created_at=attr_dict['created_at'].default, updated_at=attr_dict['updated_at'].default): 43 self.id = id 44 self.name = name 45 self.description = description 46 self.data_type = data_type 47 self.executor = executor 48 self.child_process_ids = child_process_ids 49 self.parent_process_ids = parent_process_ids 50 self.linked_project_ids = linked_project_ids 51 self.is_tenant_wide = is_tenant_wide 52 self.allow_multiple_sources = allow_multiple_sources 53 self.uses_sample_sheet = uses_sample_sheet 54 self.is_archived = is_archived 55 self.category = category 56 self.pipeline_type = pipeline_type 57 self.documentation_url = documentation_url 58 self.file_requirements_message = file_requirements_message 59 self.owner = owner 60 self.created_at = created_at 61 self.updated_at = updated_at 62 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Process.
67 def to_dict(self) -> dict[str, Any]: 68 id = self.id 69 70 name = self.name 71 72 description = self.description 73 74 data_type = self.data_type 75 76 executor = self.executor.value 77 78 child_process_ids = self.child_process_ids 79 80 parent_process_ids = self.parent_process_ids 81 82 linked_project_ids = self.linked_project_ids 83 84 is_tenant_wide = self.is_tenant_wide 85 86 allow_multiple_sources = self.allow_multiple_sources 87 88 uses_sample_sheet = self.uses_sample_sheet 89 90 is_archived = self.is_archived 91 92 category = self.category 93 94 pipeline_type = self.pipeline_type 95 96 documentation_url = self.documentation_url 97 98 file_requirements_message = self.file_requirements_message 99 100 owner: None | str | Unset 101 if isinstance(self.owner, Unset): 102 owner = UNSET 103 else: 104 owner = self.owner 105 106 created_at: str | Unset = UNSET 107 if not isinstance(self.created_at, Unset): 108 created_at = self.created_at.isoformat() 109 110 updated_at: str | Unset = UNSET 111 if not isinstance(self.updated_at, Unset): 112 updated_at = self.updated_at.isoformat() 113 114 field_dict: dict[str, Any] = {} 115 field_dict.update(self.additional_properties) 116 field_dict.update( 117 { 118 "id": id, 119 "name": name, 120 "description": description, 121 "dataType": data_type, 122 "executor": executor, 123 "childProcessIds": child_process_ids, 124 "parentProcessIds": parent_process_ids, 125 "linkedProjectIds": linked_project_ids, 126 "isTenantWide": is_tenant_wide, 127 "allowMultipleSources": allow_multiple_sources, 128 "usesSampleSheet": uses_sample_sheet, 129 "isArchived": is_archived, 130 } 131 ) 132 if category is not UNSET: 133 field_dict["category"] = category 134 if pipeline_type is not UNSET: 135 field_dict["pipelineType"] = pipeline_type 136 if documentation_url is not UNSET: 137 field_dict["documentationUrl"] = documentation_url 138 if file_requirements_message is not UNSET: 139 field_dict["fileRequirementsMessage"] = file_requirements_message 
140 if owner is not UNSET: 141 field_dict["owner"] = owner 142 if created_at is not UNSET: 143 field_dict["createdAt"] = created_at 144 if updated_at is not UNSET: 145 field_dict["updatedAt"] = updated_at 146 147 return field_dict
149 @classmethod 150 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 151 d = dict(src_dict) 152 id = d.pop("id") 153 154 name = d.pop("name") 155 156 description = d.pop("description") 157 158 data_type = d.pop("dataType") 159 160 executor = Executor(d.pop("executor")) 161 162 child_process_ids = cast(list[str], d.pop("childProcessIds")) 163 164 parent_process_ids = cast(list[str], d.pop("parentProcessIds")) 165 166 linked_project_ids = cast(list[str], d.pop("linkedProjectIds")) 167 168 is_tenant_wide = d.pop("isTenantWide") 169 170 allow_multiple_sources = d.pop("allowMultipleSources") 171 172 uses_sample_sheet = d.pop("usesSampleSheet") 173 174 is_archived = d.pop("isArchived") 175 176 category = d.pop("category", UNSET) 177 178 pipeline_type = d.pop("pipelineType", UNSET) 179 180 documentation_url = d.pop("documentationUrl", UNSET) 181 182 file_requirements_message = d.pop("fileRequirementsMessage", UNSET) 183 184 def _parse_owner(data: object) -> None | str | Unset: 185 if data is None: 186 return data 187 if isinstance(data, Unset): 188 return data 189 return cast(None | str | Unset, data) 190 191 owner = _parse_owner(d.pop("owner", UNSET)) 192 193 _created_at = d.pop("createdAt", UNSET) 194 created_at: datetime.datetime | Unset 195 if isinstance(_created_at, Unset): 196 created_at = UNSET 197 else: 198 created_at = isoparse(_created_at) 199 200 _updated_at = d.pop("updatedAt", UNSET) 201 updated_at: datetime.datetime | Unset 202 if isinstance(_updated_at, Unset): 203 updated_at = UNSET 204 else: 205 updated_at = isoparse(_updated_at) 206 207 process = cls( 208 id=id, 209 name=name, 210 description=description, 211 data_type=data_type, 212 executor=executor, 213 child_process_ids=child_process_ids, 214 parent_process_ids=parent_process_ids, 215 linked_project_ids=linked_project_ids, 216 is_tenant_wide=is_tenant_wide, 217 allow_multiple_sources=allow_multiple_sources, 218 uses_sample_sheet=uses_sample_sheet, 219 is_archived=is_archived, 220 
category=category, 221 pipeline_type=pipeline_type, 222 documentation_url=documentation_url, 223 file_requirements_message=file_requirements_message, 224 owner=owner, 225 created_at=created_at, 226 updated_at=updated_at, 227 ) 228 229 process.additional_properties = d 230 return process
@_attrs_define
class ProcessDetail:
    """Identifies a data type or pipeline in Cirro

    Attributes:
        id (str): Unique ID of the Process Example: process-hutch-magic_flute-1_0.
        name (str): Friendly name for the process Example: MAGeCK Flute.
        description (str): Description of the process Example: MAGeCK Flute enables accurate identification of essential
            genes with their related biological functions.
        data_type (str): Name of the data type this pipeline produces (if it is not defined, use the name)
        executor (Executor): How the workflow is executed
        child_process_ids (list[str]): IDs of pipelines that can be run downstream
        parent_process_ids (list[str]): IDs of processes that can run this pipeline
        linked_project_ids (list[str]): Projects that can run this process
        is_tenant_wide (bool): Whether the process is shared with the tenant
        allow_multiple_sources (bool): Whether the pipeline is allowed to have multiple dataset sources
        uses_sample_sheet (bool): Whether the pipeline uses the Cirro-provided sample sheet
        is_archived (bool): Whether the process is marked as archived
        category (str | Unset): Category of the process Example: Microbial Analysis.
        pipeline_type (str | Unset): Type of pipeline Example: nf-core.
        documentation_url (str | Unset): Link to process documentation Example:
            https://docs.cirro.bio/pipelines/catalog_targeted_sequencing/#crispr-screen-analysis.
        file_requirements_message (str | Unset): Description of the files to be uploaded (optional)
        pipeline_code (None | PipelineCode | Unset):
        owner (None | str | Unset): Username of the pipeline creator (blank if Cirro curated)
        custom_settings (CustomPipelineSettings | None | Unset):
        file_mapping_rules (list[FileMappingRule] | None | Unset):
        created_at (datetime.datetime | Unset): When the process was created (does not reflect the pipeline code)
        updated_at (datetime.datetime | Unset): When the process was updated (does not reflect the pipeline code)
    """

    # Required fields (always present in the API payload).
    id: str
    name: str
    description: str
    data_type: str
    executor: Executor
    child_process_ids: list[str]
    parent_process_ids: list[str]
    linked_project_ids: list[str]
    is_tenant_wide: bool
    allow_multiple_sources: bool
    uses_sample_sheet: bool
    is_archived: bool
    # Optional fields: UNSET means "absent from the payload", which is distinct
    # from an explicit JSON null (represented as None where the union allows it).
    category: str | Unset = UNSET
    pipeline_type: str | Unset = UNSET
    documentation_url: str | Unset = UNSET
    file_requirements_message: str | Unset = UNSET
    pipeline_code: None | PipelineCode | Unset = UNSET
    owner: None | str | Unset = UNSET
    custom_settings: CustomPipelineSettings | None | Unset = UNSET
    file_mapping_rules: list[FileMappingRule] | None | Unset = UNSET
    created_at: datetime.datetime | Unset = UNSET
    updated_at: datetime.datetime | Unset = UNSET
    # Catch-all for JSON keys not declared above; round-tripped by to_dict/from_dict.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize this model to a JSON-compatible dict.

        Keys use the API's camelCase names; optional fields whose value is
        UNSET are omitted entirely. Nested models are serialized recursively
        and datetimes are rendered as ISO-8601 strings.
        """
        # Local imports — presumably deferred to avoid circular imports among
        # the generated model modules (TODO confirm against the generator).
        from ..models.custom_pipeline_settings import CustomPipelineSettings
        from ..models.pipeline_code import PipelineCode

        id = self.id

        name = self.name

        description = self.description

        data_type = self.data_type

        # Enum members serialize to their raw value.
        executor = self.executor.value

        child_process_ids = self.child_process_ids

        parent_process_ids = self.parent_process_ids

        linked_project_ids = self.linked_project_ids

        is_tenant_wide = self.is_tenant_wide

        allow_multiple_sources = self.allow_multiple_sources

        uses_sample_sheet = self.uses_sample_sheet

        is_archived = self.is_archived

        category = self.category

        pipeline_type = self.pipeline_type

        documentation_url = self.documentation_url

        file_requirements_message = self.file_requirements_message

        # Nullable nested model: UNSET passes through, a model instance is
        # serialized via to_dict(), and None falls through unchanged.
        pipeline_code: dict[str, Any] | None | Unset
        if isinstance(self.pipeline_code, Unset):
            pipeline_code = UNSET
        elif isinstance(self.pipeline_code, PipelineCode):
            pipeline_code = self.pipeline_code.to_dict()
        else:
            pipeline_code = self.pipeline_code

        # Nullable scalar: only UNSET needs special-casing; None and str pass through.
        owner: None | str | Unset
        if isinstance(self.owner, Unset):
            owner = UNSET
        else:
            owner = self.owner

        custom_settings: dict[str, Any] | None | Unset
        if isinstance(self.custom_settings, Unset):
            custom_settings = UNSET
        elif isinstance(self.custom_settings, CustomPipelineSettings):
            custom_settings = self.custom_settings.to_dict()
        else:
            custom_settings = self.custom_settings

        # Nullable list of nested models: serialize each item when a list is present.
        file_mapping_rules: list[dict[str, Any]] | None | Unset
        if isinstance(self.file_mapping_rules, Unset):
            file_mapping_rules = UNSET
        elif isinstance(self.file_mapping_rules, list):
            file_mapping_rules = []
            for file_mapping_rules_type_0_item_data in self.file_mapping_rules:
                file_mapping_rules_type_0_item = file_mapping_rules_type_0_item_data.to_dict()
                file_mapping_rules.append(file_mapping_rules_type_0_item)

        else:
            file_mapping_rules = self.file_mapping_rules

        # Datetimes are emitted as ISO-8601 strings when set.
        created_at: str | Unset = UNSET
        if not isinstance(self.created_at, Unset):
            created_at = self.created_at.isoformat()

        updated_at: str | Unset = UNSET
        if not isinstance(self.updated_at, Unset):
            updated_at = self.updated_at.isoformat()

        # additional_properties go in first so declared fields win on key collisions.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
                "description": description,
                "dataType": data_type,
                "executor": executor,
                "childProcessIds": child_process_ids,
                "parentProcessIds": parent_process_ids,
                "linkedProjectIds": linked_project_ids,
                "isTenantWide": is_tenant_wide,
                "allowMultipleSources": allow_multiple_sources,
                "usesSampleSheet": uses_sample_sheet,
                "isArchived": is_archived,
            }
        )
        # Optional keys are only emitted when set (explicit None IS emitted).
        if category is not UNSET:
            field_dict["category"] = category
        if pipeline_type is not UNSET:
            field_dict["pipelineType"] = pipeline_type
        if documentation_url is not UNSET:
            field_dict["documentationUrl"] = documentation_url
        if file_requirements_message is not UNSET:
            field_dict["fileRequirementsMessage"] = file_requirements_message
        if pipeline_code is not UNSET:
            field_dict["pipelineCode"] = pipeline_code
        if owner is not UNSET:
            field_dict["owner"] = owner
        if custom_settings is not UNSET:
            field_dict["customSettings"] = custom_settings
        if file_mapping_rules is not UNSET:
            field_dict["fileMappingRules"] = file_mapping_rules
        if created_at is not UNSET:
            field_dict["createdAt"] = created_at
        if updated_at is not UNSET:
            field_dict["updatedAt"] = updated_at

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a ProcessDetail from a JSON-derived mapping.

        Required keys are popped and raise KeyError when missing; optional
        keys default to UNSET. Any keys left over after popping are stored
        in ``additional_properties``.
        """
        from ..models.custom_pipeline_settings import CustomPipelineSettings
        from ..models.file_mapping_rule import FileMappingRule
        from ..models.pipeline_code import PipelineCode

        # Copy so the caller's mapping is not mutated by the pops below.
        d = dict(src_dict)
        id = d.pop("id")

        name = d.pop("name")

        description = d.pop("description")

        data_type = d.pop("dataType")

        executor = Executor(d.pop("executor"))

        child_process_ids = cast(list[str], d.pop("childProcessIds"))

        parent_process_ids = cast(list[str], d.pop("parentProcessIds"))

        linked_project_ids = cast(list[str], d.pop("linkedProjectIds"))

        is_tenant_wide = d.pop("isTenantWide")

        allow_multiple_sources = d.pop("allowMultipleSources")

        uses_sample_sheet = d.pop("usesSampleSheet")

        is_archived = d.pop("isArchived")

        category = d.pop("category", UNSET)

        pipeline_type = d.pop("pipelineType", UNSET)

        documentation_url = d.pop("documentationUrl", UNSET)

        file_requirements_message = d.pop("fileRequirementsMessage", UNSET)

        # Union parser: None/UNSET pass through; a dict is tried as a nested
        # PipelineCode, and on any parse failure the raw data is returned as-is.
        def _parse_pipeline_code(data: object) -> None | PipelineCode | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                pipeline_code_type_1 = PipelineCode.from_dict(data)

                return pipeline_code_type_1
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(None | PipelineCode | Unset, data)

        pipeline_code = _parse_pipeline_code(d.pop("pipelineCode", UNSET))

        def _parse_owner(data: object) -> None | str | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        owner = _parse_owner(d.pop("owner", UNSET))

        def _parse_custom_settings(data: object) -> CustomPipelineSettings | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                custom_settings_type_1 = CustomPipelineSettings.from_dict(data)

                return custom_settings_type_1
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(CustomPipelineSettings | None | Unset, data)

        custom_settings = _parse_custom_settings(d.pop("customSettings", UNSET))

        # Same fallback pattern for the nullable list of FileMappingRule items.
        def _parse_file_mapping_rules(data: object) -> list[FileMappingRule] | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                file_mapping_rules_type_0 = []
                _file_mapping_rules_type_0 = data
                for file_mapping_rules_type_0_item_data in _file_mapping_rules_type_0:
                    file_mapping_rules_type_0_item = FileMappingRule.from_dict(file_mapping_rules_type_0_item_data)

                    file_mapping_rules_type_0.append(file_mapping_rules_type_0_item)

                return file_mapping_rules_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(list[FileMappingRule] | None | Unset, data)

        file_mapping_rules = _parse_file_mapping_rules(d.pop("fileMappingRules", UNSET))

        # ISO-8601 timestamps are parsed with dateutil's isoparse when present.
        _created_at = d.pop("createdAt", UNSET)
        created_at: datetime.datetime | Unset
        if isinstance(_created_at, Unset):
            created_at = UNSET
        else:
            created_at = isoparse(_created_at)

        _updated_at = d.pop("updatedAt", UNSET)
        updated_at: datetime.datetime | Unset
        if isinstance(_updated_at, Unset):
            updated_at = UNSET
        else:
            updated_at = isoparse(_updated_at)

        process_detail = cls(
            id=id,
            name=name,
            description=description,
            data_type=data_type,
            executor=executor,
            child_process_ids=child_process_ids,
            parent_process_ids=parent_process_ids,
            linked_project_ids=linked_project_ids,
            is_tenant_wide=is_tenant_wide,
            allow_multiple_sources=allow_multiple_sources,
            uses_sample_sheet=uses_sample_sheet,
            is_archived=is_archived,
            category=category,
            pipeline_type=pipeline_type,
            documentation_url=documentation_url,
            file_requirements_message=file_requirements_message,
            pipeline_code=pipeline_code,
            owner=owner,
            custom_settings=custom_settings,
            file_mapping_rules=file_mapping_rules,
            created_at=created_at,
            updated_at=updated_at,
        )

        # Whatever keys remain were not declared fields; keep them for round-tripping.
        process_detail.additional_properties = d
        return process_detail

    @property
    def additional_keys(self) -> list[str]:
        """Names of the undeclared keys captured from the source payload."""
        return list(self.additional_properties.keys())

    # dict-like access delegates to additional_properties only
    # (declared fields are regular attributes).
    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Identifies a data type or pipeline in Cirro
Attributes:
- id (str): Unique ID of the Process Example: process-hutch-magic_flute-1_0.
- name (str): Friendly name for the process Example: MAGeCK Flute.
- description (str): Description of the process Example: MAGeCK Flute enables accurate identification of essential genes with their related biological functions.
- data_type (str): Name of the data type this pipeline produces (if it is not defined, use the name)
- executor (Executor): How the workflow is executed
- child_process_ids (list[str]): IDs of pipelines that can be run downstream
- parent_process_ids (list[str]): IDs of processes that can run this pipeline
- linked_project_ids (list[str]): Projects that can run this process
- is_tenant_wide (bool): Whether the process is shared with the tenant
- allow_multiple_sources (bool): Whether the pipeline is allowed to have multiple dataset sources
- uses_sample_sheet (bool): Whether the pipeline uses the Cirro-provided sample sheet
- is_archived (bool): Whether the process is marked as archived
- category (str | Unset): Category of the process Example: Microbial Analysis.
- pipeline_type (str | Unset): Type of pipeline Example: nf-core.
- documentation_url (str | Unset): Link to process documentation Example: https://docs.cirro.bio/pipelines/catalog_targeted_sequencing/#crispr-screen-analysis.
- file_requirements_message (str | Unset): Description of the files to be uploaded (optional)
- pipeline_code (None | PipelineCode | Unset):
- owner (None | str | Unset): Username of the pipeline creator (blank if Cirro curated)
- custom_settings (CustomPipelineSettings | None | Unset):
- file_mapping_rules (list[FileMappingRule] | None | Unset):
- created_at (datetime.datetime | Unset): When the process was created (does not reflect the pipeline code)
- updated_at (datetime.datetime | Unset): When the process was updated (does not reflect the pipeline code)
45def __init__(self, id, name, description, data_type, executor, child_process_ids, parent_process_ids, linked_project_ids, is_tenant_wide, allow_multiple_sources, uses_sample_sheet, is_archived, category=attr_dict['category'].default, pipeline_type=attr_dict['pipeline_type'].default, documentation_url=attr_dict['documentation_url'].default, file_requirements_message=attr_dict['file_requirements_message'].default, pipeline_code=attr_dict['pipeline_code'].default, owner=attr_dict['owner'].default, custom_settings=attr_dict['custom_settings'].default, file_mapping_rules=attr_dict['file_mapping_rules'].default, created_at=attr_dict['created_at'].default, updated_at=attr_dict['updated_at'].default): 46 self.id = id 47 self.name = name 48 self.description = description 49 self.data_type = data_type 50 self.executor = executor 51 self.child_process_ids = child_process_ids 52 self.parent_process_ids = parent_process_ids 53 self.linked_project_ids = linked_project_ids 54 self.is_tenant_wide = is_tenant_wide 55 self.allow_multiple_sources = allow_multiple_sources 56 self.uses_sample_sheet = uses_sample_sheet 57 self.is_archived = is_archived 58 self.category = category 59 self.pipeline_type = pipeline_type 60 self.documentation_url = documentation_url 61 self.file_requirements_message = file_requirements_message 62 self.pipeline_code = pipeline_code 63 self.owner = owner 64 self.custom_settings = custom_settings 65 self.file_mapping_rules = file_mapping_rules 66 self.created_at = created_at 67 self.updated_at = updated_at 68 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProcessDetail.
79 def to_dict(self) -> dict[str, Any]: 80 from ..models.custom_pipeline_settings import CustomPipelineSettings 81 from ..models.pipeline_code import PipelineCode 82 83 id = self.id 84 85 name = self.name 86 87 description = self.description 88 89 data_type = self.data_type 90 91 executor = self.executor.value 92 93 child_process_ids = self.child_process_ids 94 95 parent_process_ids = self.parent_process_ids 96 97 linked_project_ids = self.linked_project_ids 98 99 is_tenant_wide = self.is_tenant_wide 100 101 allow_multiple_sources = self.allow_multiple_sources 102 103 uses_sample_sheet = self.uses_sample_sheet 104 105 is_archived = self.is_archived 106 107 category = self.category 108 109 pipeline_type = self.pipeline_type 110 111 documentation_url = self.documentation_url 112 113 file_requirements_message = self.file_requirements_message 114 115 pipeline_code: dict[str, Any] | None | Unset 116 if isinstance(self.pipeline_code, Unset): 117 pipeline_code = UNSET 118 elif isinstance(self.pipeline_code, PipelineCode): 119 pipeline_code = self.pipeline_code.to_dict() 120 else: 121 pipeline_code = self.pipeline_code 122 123 owner: None | str | Unset 124 if isinstance(self.owner, Unset): 125 owner = UNSET 126 else: 127 owner = self.owner 128 129 custom_settings: dict[str, Any] | None | Unset 130 if isinstance(self.custom_settings, Unset): 131 custom_settings = UNSET 132 elif isinstance(self.custom_settings, CustomPipelineSettings): 133 custom_settings = self.custom_settings.to_dict() 134 else: 135 custom_settings = self.custom_settings 136 137 file_mapping_rules: list[dict[str, Any]] | None | Unset 138 if isinstance(self.file_mapping_rules, Unset): 139 file_mapping_rules = UNSET 140 elif isinstance(self.file_mapping_rules, list): 141 file_mapping_rules = [] 142 for file_mapping_rules_type_0_item_data in self.file_mapping_rules: 143 file_mapping_rules_type_0_item = file_mapping_rules_type_0_item_data.to_dict() 144 file_mapping_rules.append(file_mapping_rules_type_0_item) 
145 146 else: 147 file_mapping_rules = self.file_mapping_rules 148 149 created_at: str | Unset = UNSET 150 if not isinstance(self.created_at, Unset): 151 created_at = self.created_at.isoformat() 152 153 updated_at: str | Unset = UNSET 154 if not isinstance(self.updated_at, Unset): 155 updated_at = self.updated_at.isoformat() 156 157 field_dict: dict[str, Any] = {} 158 field_dict.update(self.additional_properties) 159 field_dict.update( 160 { 161 "id": id, 162 "name": name, 163 "description": description, 164 "dataType": data_type, 165 "executor": executor, 166 "childProcessIds": child_process_ids, 167 "parentProcessIds": parent_process_ids, 168 "linkedProjectIds": linked_project_ids, 169 "isTenantWide": is_tenant_wide, 170 "allowMultipleSources": allow_multiple_sources, 171 "usesSampleSheet": uses_sample_sheet, 172 "isArchived": is_archived, 173 } 174 ) 175 if category is not UNSET: 176 field_dict["category"] = category 177 if pipeline_type is not UNSET: 178 field_dict["pipelineType"] = pipeline_type 179 if documentation_url is not UNSET: 180 field_dict["documentationUrl"] = documentation_url 181 if file_requirements_message is not UNSET: 182 field_dict["fileRequirementsMessage"] = file_requirements_message 183 if pipeline_code is not UNSET: 184 field_dict["pipelineCode"] = pipeline_code 185 if owner is not UNSET: 186 field_dict["owner"] = owner 187 if custom_settings is not UNSET: 188 field_dict["customSettings"] = custom_settings 189 if file_mapping_rules is not UNSET: 190 field_dict["fileMappingRules"] = file_mapping_rules 191 if created_at is not UNSET: 192 field_dict["createdAt"] = created_at 193 if updated_at is not UNSET: 194 field_dict["updatedAt"] = updated_at 195 196 return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Deserialize a ProcessDetail from its JSON-style mapping.

    Required keys are consumed unconditionally (a missing key raises
    KeyError, as before); optional keys default to UNSET.  Whatever keys
    remain after deserialization are preserved in ``additional_properties``.
    """
    from ..models.custom_pipeline_settings import CustomPipelineSettings
    from ..models.file_mapping_rule import FileMappingRule
    from ..models.pipeline_code import PipelineCode

    payload = dict(src_dict)

    def _optional_model(raw: object, model: Any) -> Any:
        # Union ``Model | None | Unset``: None/Unset pass through, a dict is
        # deserialized, anything else (or a failed parse) is returned as-is.
        if raw is None or isinstance(raw, Unset):
            return raw
        try:
            if not isinstance(raw, dict):
                raise TypeError()
            return model.from_dict(raw)
        except (TypeError, ValueError, AttributeError, KeyError):
            return raw

    def _optional_rule_list(raw: object) -> Any:
        # Union ``list[FileMappingRule] | None | Unset`` with the same
        # pass-through-on-failure semantics as ``_optional_model``.
        if raw is None or isinstance(raw, Unset):
            return raw
        try:
            if not isinstance(raw, list):
                raise TypeError()
            return [FileMappingRule.from_dict(entry) for entry in raw]
        except (TypeError, ValueError, AttributeError, KeyError):
            return raw

    def _optional_datetime(raw: Any) -> Any:
        # Optional (but not nullable) ISO-8601 timestamp.
        return raw if isinstance(raw, Unset) else isoparse(raw)

    pipeline_code = _optional_model(payload.pop("pipelineCode", UNSET), PipelineCode)
    custom_settings = _optional_model(payload.pop("customSettings", UNSET), CustomPipelineSettings)
    file_mapping_rules = _optional_rule_list(payload.pop("fileMappingRules", UNSET))
    created_at = _optional_datetime(payload.pop("createdAt", UNSET))
    updated_at = _optional_datetime(payload.pop("updatedAt", UNSET))

    process_detail = cls(
        id=payload.pop("id"),
        name=payload.pop("name"),
        description=payload.pop("description"),
        data_type=payload.pop("dataType"),
        executor=Executor(payload.pop("executor")),
        child_process_ids=cast(list[str], payload.pop("childProcessIds")),
        parent_process_ids=cast(list[str], payload.pop("parentProcessIds")),
        linked_project_ids=cast(list[str], payload.pop("linkedProjectIds")),
        is_tenant_wide=payload.pop("isTenantWide"),
        allow_multiple_sources=payload.pop("allowMultipleSources"),
        uses_sample_sheet=payload.pop("usesSampleSheet"),
        is_archived=payload.pop("isArchived"),
        category=payload.pop("category", UNSET),
        pipeline_type=payload.pop("pipelineType", UNSET),
        documentation_url=payload.pop("documentationUrl", UNSET),
        file_requirements_message=payload.pop("fileRequirementsMessage", UNSET),
        pipeline_code=pipeline_code,
        # ``owner`` is a pure pass-through union (None | str | Unset).
        owner=payload.pop("owner", UNSET),
        custom_settings=custom_settings,
        file_mapping_rules=file_mapping_rules,
        created_at=created_at,
        updated_at=updated_at,
    )

    process_detail.additional_properties = payload
    return process_detail
@_attrs_define
class ProcessDocumentation:
    """Documentation attached to a process.

    Attributes:
        docs_uri (str | Unset): Full URI to documentation
        partial_uri (str | Unset): URI of process documentation (partial)
        content (str | Unset): Documentation content
    """

    docs_uri: str | Unset = UNSET
    partial_uri: str | Unset = UNSET
    content: str | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-style dict, omitting fields that are unset."""
        field_dict: dict[str, Any] = dict(self.additional_properties)
        for key, value in (
            ("docsUri", self.docs_uri),
            ("partialUri", self.partial_uri),
            ("content", self.content),
        ):
            if value is not UNSET:
                field_dict[key] = value
        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a JSON-style dict; leftover keys are kept as additional_properties."""
        leftover = dict(src_dict)
        process_documentation = cls(
            docs_uri=leftover.pop("docsUri", UNSET),
            partial_uri=leftover.pop("partialUri", UNSET),
            content=leftover.pop("content", UNSET),
        )
        process_documentation.additional_properties = leftover
        return process_documentation

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- docs_uri (str | Unset): Full URI to documentation
- partial_uri (str | Unset): URI of process documentation (partial)
- content (str | Unset): Documentation content
26def __init__(self, docs_uri=attr_dict['docs_uri'].default, partial_uri=attr_dict['partial_uri'].default, content=attr_dict['content'].default): 27 self.docs_uri = docs_uri 28 self.partial_uri = partial_uri 29 self.content = content 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProcessDocumentation.
29 def to_dict(self) -> dict[str, Any]: 30 docs_uri = self.docs_uri 31 32 partial_uri = self.partial_uri 33 34 content = self.content 35 36 field_dict: dict[str, Any] = {} 37 field_dict.update(self.additional_properties) 38 field_dict.update({}) 39 if docs_uri is not UNSET: 40 field_dict["docsUri"] = docs_uri 41 if partial_uri is not UNSET: 42 field_dict["partialUri"] = partial_uri 43 if content is not UNSET: 44 field_dict["content"] = content 45 46 return field_dict
48 @classmethod 49 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 50 d = dict(src_dict) 51 docs_uri = d.pop("docsUri", UNSET) 52 53 partial_uri = d.pop("partialUri", UNSET) 54 55 content = d.pop("content", UNSET) 56 57 process_documentation = cls( 58 docs_uri=docs_uri, 59 partial_uri=partial_uri, 60 content=content, 61 ) 62 63 process_documentation.additional_properties = d 64 return process_documentation
@_attrs_define
class Project:
    """A project record.

    Attributes:
        id (str):
        name (str):
        description (str):
        status (Status):
        tags (list[Tag]):
        organization (str):
        classification_ids (list[str]):
        billing_account_id (str):
    """

    id: str
    name: str
    description: str
    status: Status
    tags: list[Tag]
    organization: str
    classification_ids: list[str]
    billing_account_id: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-style dict (every field is required)."""
        field_dict: dict[str, Any] = dict(self.additional_properties)
        field_dict.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "status": self.status.value,
                "tags": [tag.to_dict() for tag in self.tags],
                "organization": self.organization,
                "classificationIds": self.classification_ids,
                "billingAccountId": self.billing_account_id,
            }
        )
        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a JSON-style dict; leftover keys are kept as additional_properties."""
        from ..models.tag import Tag

        leftover = dict(src_dict)
        project = cls(
            id=leftover.pop("id"),
            name=leftover.pop("name"),
            description=leftover.pop("description"),
            status=Status(leftover.pop("status")),
            tags=[Tag.from_dict(item) for item in leftover.pop("tags")],
            organization=leftover.pop("organization"),
            classification_ids=cast(list[str], leftover.pop("classificationIds")),
            billing_account_id=leftover.pop("billingAccountId"),
        )
        project.additional_properties = leftover
        return project

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- description (str):
- status (Status):
- tags (list[Tag]):
- organization (str):
- classification_ids (list[str]):
- billing_account_id (str):
31def __init__(self, id, name, description, status, tags, organization, classification_ids, billing_account_id): 32 self.id = id 33 self.name = name 34 self.description = description 35 self.status = status 36 self.tags = tags 37 self.organization = organization 38 self.classification_ids = classification_ids 39 self.billing_account_id = billing_account_id 40 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Project.
43 def to_dict(self) -> dict[str, Any]: 44 id = self.id 45 46 name = self.name 47 48 description = self.description 49 50 status = self.status.value 51 52 tags = [] 53 for tags_item_data in self.tags: 54 tags_item = tags_item_data.to_dict() 55 tags.append(tags_item) 56 57 organization = self.organization 58 59 classification_ids = self.classification_ids 60 61 billing_account_id = self.billing_account_id 62 63 field_dict: dict[str, Any] = {} 64 field_dict.update(self.additional_properties) 65 field_dict.update( 66 { 67 "id": id, 68 "name": name, 69 "description": description, 70 "status": status, 71 "tags": tags, 72 "organization": organization, 73 "classificationIds": classification_ids, 74 "billingAccountId": billing_account_id, 75 } 76 ) 77 78 return field_dict
80 @classmethod 81 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 82 from ..models.tag import Tag 83 84 d = dict(src_dict) 85 id = d.pop("id") 86 87 name = d.pop("name") 88 89 description = d.pop("description") 90 91 status = Status(d.pop("status")) 92 93 tags = [] 94 _tags = d.pop("tags") 95 for tags_item_data in _tags: 96 tags_item = Tag.from_dict(tags_item_data) 97 98 tags.append(tags_item) 99 100 organization = d.pop("organization") 101 102 classification_ids = cast(list[str], d.pop("classificationIds")) 103 104 billing_account_id = d.pop("billingAccountId") 105 106 project = cls( 107 id=id, 108 name=name, 109 description=description, 110 status=status, 111 tags=tags, 112 organization=organization, 113 classification_ids=classification_ids, 114 billing_account_id=billing_account_id, 115 ) 116 117 project.additional_properties = d 118 return project
@_attrs_define
class ProjectAccessRequest:
    """A pending or reviewed request for access to a project.

    Attributes:
        id (str):
        username (str):
        project_id (str):
        role (ProjectRole):
        message (str):
        status (RequestStatus):
        reviewer_username (str):
        created_at (datetime.datetime):
        expiry (datetime.datetime):
    """

    id: str
    username: str
    project_id: str
    role: ProjectRole
    message: str
    status: RequestStatus
    reviewer_username: str
    created_at: datetime.datetime
    expiry: datetime.datetime
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-style dict (every field is required)."""
        field_dict: dict[str, Any] = dict(self.additional_properties)
        field_dict.update(
            {
                "id": self.id,
                "username": self.username,
                "projectId": self.project_id,
                "role": self.role.value,
                "message": self.message,
                "status": self.status.value,
                "reviewerUsername": self.reviewer_username,
                "createdAt": self.created_at.isoformat(),
                "expiry": self.expiry.isoformat(),
            }
        )
        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a JSON-style dict; leftover keys are kept as additional_properties."""
        leftover = dict(src_dict)
        project_access_request = cls(
            id=leftover.pop("id"),
            username=leftover.pop("username"),
            project_id=leftover.pop("projectId"),
            role=ProjectRole(leftover.pop("role")),
            message=leftover.pop("message"),
            status=RequestStatus(leftover.pop("status")),
            reviewer_username=leftover.pop("reviewerUsername"),
            created_at=isoparse(leftover.pop("createdAt")),
            expiry=isoparse(leftover.pop("expiry")),
        )
        project_access_request.additional_properties = leftover
        return project_access_request

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- username (str):
- project_id (str):
- role (ProjectRole):
- message (str):
- status (RequestStatus):
- reviewer_username (str):
- created_at (datetime.datetime):
- expiry (datetime.datetime):
32def __init__(self, id, username, project_id, role, message, status, reviewer_username, created_at, expiry): 33 self.id = id 34 self.username = username 35 self.project_id = project_id 36 self.role = role 37 self.message = message 38 self.status = status 39 self.reviewer_username = reviewer_username 40 self.created_at = created_at 41 self.expiry = expiry 42 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectAccessRequest.
44 def to_dict(self) -> dict[str, Any]: 45 id = self.id 46 47 username = self.username 48 49 project_id = self.project_id 50 51 role = self.role.value 52 53 message = self.message 54 55 status = self.status.value 56 57 reviewer_username = self.reviewer_username 58 59 created_at = self.created_at.isoformat() 60 61 expiry = self.expiry.isoformat() 62 63 field_dict: dict[str, Any] = {} 64 field_dict.update(self.additional_properties) 65 field_dict.update( 66 { 67 "id": id, 68 "username": username, 69 "projectId": project_id, 70 "role": role, 71 "message": message, 72 "status": status, 73 "reviewerUsername": reviewer_username, 74 "createdAt": created_at, 75 "expiry": expiry, 76 } 77 ) 78 79 return field_dict
81 @classmethod 82 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 83 d = dict(src_dict) 84 id = d.pop("id") 85 86 username = d.pop("username") 87 88 project_id = d.pop("projectId") 89 90 role = ProjectRole(d.pop("role")) 91 92 message = d.pop("message") 93 94 status = RequestStatus(d.pop("status")) 95 96 reviewer_username = d.pop("reviewerUsername") 97 98 created_at = isoparse(d.pop("createdAt")) 99 100 expiry = isoparse(d.pop("expiry")) 101 102 project_access_request = cls( 103 id=id, 104 username=username, 105 project_id=project_id, 106 role=role, 107 message=message, 108 status=status, 109 reviewer_username=reviewer_username, 110 created_at=created_at, 111 expiry=expiry, 112 ) 113 114 project_access_request.additional_properties = d 115 return project_access_request
class ProjectAccessType(str, Enum):
    """Type of project file access being requested."""

    DATASET_UPLOAD = "DATASET_UPLOAD"
    PROJECT_DOWNLOAD = "PROJECT_DOWNLOAD"
    REFERENCE_UPLOAD = "REFERENCE_UPLOAD"
    SAMPLESHEET_UPLOAD = "SAMPLESHEET_UPLOAD"
    SHARED_DATASET_DOWNLOAD = "SHARED_DATASET_DOWNLOAD"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Render as the raw wire value, e.g. for URL/query building.
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Map any unrecognized wire value to UNKNOWN instead of raising,
        # keeping the client forward-compatible with new server-side members.
        # (Was `cls(cls.UNKNOWN)` with the argument misnamed `number`: the
        # Enum `_missing_` hook receives the unmatched *value*, and returning
        # the member directly avoids a redundant constructor round-trip.)
        return cls.UNKNOWN
str(object='') -> str
str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class ProjectCreateOptions:
    """Options presented when creating a project.

    Attributes:
        enabled_account_types (list[CloudAccountType]):
        portal_account_id (str):
        portal_region (str):
        template_url (str):
        wizard_url (str):
    """

    enabled_account_types: list[CloudAccountType]
    portal_account_id: str
    portal_region: str
    template_url: str
    wizard_url: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-style dict (every field is required)."""
        field_dict: dict[str, Any] = dict(self.additional_properties)
        field_dict.update(
            {
                "enabledAccountTypes": [account_type.value for account_type in self.enabled_account_types],
                "portalAccountId": self.portal_account_id,
                "portalRegion": self.portal_region,
                "templateUrl": self.template_url,
                "wizardUrl": self.wizard_url,
            }
        )
        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a JSON-style dict; leftover keys are kept as additional_properties."""
        leftover = dict(src_dict)
        project_create_options = cls(
            enabled_account_types=[CloudAccountType(raw) for raw in leftover.pop("enabledAccountTypes")],
            portal_account_id=leftover.pop("portalAccountId"),
            portal_region=leftover.pop("portalRegion"),
            template_url=leftover.pop("templateUrl"),
            wizard_url=leftover.pop("wizardUrl"),
        )
        project_create_options.additional_properties = leftover
        return project_create_options

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- enabled_account_types (list[CloudAccountType]):
- portal_account_id (str):
- portal_region (str):
- template_url (str):
- wizard_url (str):
28def __init__(self, enabled_account_types, portal_account_id, portal_region, template_url, wizard_url): 29 self.enabled_account_types = enabled_account_types 30 self.portal_account_id = portal_account_id 31 self.portal_region = portal_region 32 self.template_url = template_url 33 self.wizard_url = wizard_url 34 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectCreateOptions.
33 def to_dict(self) -> dict[str, Any]: 34 enabled_account_types = [] 35 for enabled_account_types_item_data in self.enabled_account_types: 36 enabled_account_types_item = enabled_account_types_item_data.value 37 enabled_account_types.append(enabled_account_types_item) 38 39 portal_account_id = self.portal_account_id 40 41 portal_region = self.portal_region 42 43 template_url = self.template_url 44 45 wizard_url = self.wizard_url 46 47 field_dict: dict[str, Any] = {} 48 field_dict.update(self.additional_properties) 49 field_dict.update( 50 { 51 "enabledAccountTypes": enabled_account_types, 52 "portalAccountId": portal_account_id, 53 "portalRegion": portal_region, 54 "templateUrl": template_url, 55 "wizardUrl": wizard_url, 56 } 57 ) 58 59 return field_dict
61 @classmethod 62 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 63 d = dict(src_dict) 64 enabled_account_types = [] 65 _enabled_account_types = d.pop("enabledAccountTypes") 66 for enabled_account_types_item_data in _enabled_account_types: 67 enabled_account_types_item = CloudAccountType(enabled_account_types_item_data) 68 69 enabled_account_types.append(enabled_account_types_item) 70 71 portal_account_id = d.pop("portalAccountId") 72 73 portal_region = d.pop("portalRegion") 74 75 template_url = d.pop("templateUrl") 76 77 wizard_url = d.pop("wizardUrl") 78 79 project_create_options = cls( 80 enabled_account_types=enabled_account_types, 81 portal_account_id=portal_account_id, 82 portal_region=portal_region, 83 template_url=template_url, 84 wizard_url=wizard_url, 85 ) 86 87 project_create_options.additional_properties = d 88 return project_create_options
@_attrs_define
class ProjectDetail:
    """Full detail view of a project.

    Attributes:
        id (str):
        name (str):
        description (str):
        billing_account_id (str):
        contacts (list[Contact]):
        organization (str):
        status (Status):
        settings (ProjectSettings):
        account (CloudAccount):
        status_message (str):
        tags (list[Tag]):
        classification_ids (list[str]):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
        deployed_at (datetime.datetime | None | Unset):
    """

    id: str
    name: str
    description: str
    billing_account_id: str
    contacts: list[Contact]
    organization: str
    status: Status
    settings: ProjectSettings
    account: CloudAccount
    status_message: str
    tags: list[Tag]
    classification_ids: list[str]
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    deployed_at: datetime.datetime | None | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-style dict; ``deployedAt`` is emitted only when set."""
        # Nullable/optional timestamp: Unset -> omitted, datetime -> ISO string,
        # anything else (i.e. None) passes through untouched.
        deployed_at: None | str | Unset
        if isinstance(self.deployed_at, Unset):
            deployed_at = UNSET
        elif isinstance(self.deployed_at, datetime.datetime):
            deployed_at = self.deployed_at.isoformat()
        else:
            deployed_at = self.deployed_at

        field_dict: dict[str, Any] = dict(self.additional_properties)
        field_dict.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "billingAccountId": self.billing_account_id,
                "contacts": [contact.to_dict() for contact in self.contacts],
                "organization": self.organization,
                "status": self.status.value,
                "settings": self.settings.to_dict(),
                "account": self.account.to_dict(),
                "statusMessage": self.status_message,
                "tags": [tag.to_dict() for tag in self.tags],
                "classificationIds": self.classification_ids,
                "createdBy": self.created_by,
                "createdAt": self.created_at.isoformat(),
                "updatedAt": self.updated_at.isoformat(),
            }
        )
        if deployed_at is not UNSET:
            field_dict["deployedAt"] = deployed_at
        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a JSON-style dict; leftover keys are kept as additional_properties."""
        from ..models.cloud_account import CloudAccount
        from ..models.contact import Contact
        from ..models.project_settings import ProjectSettings
        from ..models.tag import Tag

        leftover = dict(src_dict)

        def _parse_deployed_at(raw: object) -> datetime.datetime | None | Unset:
            # Nullable/optional ISO-8601 timestamp; unparsable values pass through.
            if raw is None or isinstance(raw, Unset):
                return raw
            try:
                if not isinstance(raw, str):
                    raise TypeError()
                return isoparse(raw)
            except (TypeError, ValueError, AttributeError, KeyError):
                return cast(datetime.datetime | None | Unset, raw)

        project_detail = cls(
            id=leftover.pop("id"),
            name=leftover.pop("name"),
            description=leftover.pop("description"),
            billing_account_id=leftover.pop("billingAccountId"),
            contacts=[Contact.from_dict(item) for item in leftover.pop("contacts")],
            organization=leftover.pop("organization"),
            status=Status(leftover.pop("status")),
            settings=ProjectSettings.from_dict(leftover.pop("settings")),
            account=CloudAccount.from_dict(leftover.pop("account")),
            status_message=leftover.pop("statusMessage"),
            tags=[Tag.from_dict(item) for item in leftover.pop("tags")],
            classification_ids=cast(list[str], leftover.pop("classificationIds")),
            created_by=leftover.pop("createdBy"),
            created_at=isoparse(leftover.pop("createdAt")),
            updated_at=isoparse(leftover.pop("updatedAt")),
            deployed_at=_parse_deployed_at(leftover.pop("deployedAt", UNSET)),
        )

        project_detail.additional_properties = leftover
        return project_detail

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- description (str):
- billing_account_id (str):
- contacts (list[Contact]):
- organization (str):
- status (Status):
- settings (ProjectSettings):
- account (CloudAccount):
- status_message (str):
- tags (list[Tag]):
- classification_ids (list[str]):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
- deployed_at (datetime.datetime | None | Unset):
39def __init__(self, id, name, description, billing_account_id, contacts, organization, status, settings, account, status_message, tags, classification_ids, created_by, created_at, updated_at, deployed_at=attr_dict['deployed_at'].default): 40 self.id = id 41 self.name = name 42 self.description = description 43 self.billing_account_id = billing_account_id 44 self.contacts = contacts 45 self.organization = organization 46 self.status = status 47 self.settings = settings 48 self.account = account 49 self.status_message = status_message 50 self.tags = tags 51 self.classification_ids = classification_ids 52 self.created_by = created_by 53 self.created_at = created_at 54 self.updated_at = updated_at 55 self.deployed_at = deployed_at 56 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectDetail.
65 def to_dict(self) -> dict[str, Any]: 66 id = self.id 67 68 name = self.name 69 70 description = self.description 71 72 billing_account_id = self.billing_account_id 73 74 contacts = [] 75 for contacts_item_data in self.contacts: 76 contacts_item = contacts_item_data.to_dict() 77 contacts.append(contacts_item) 78 79 organization = self.organization 80 81 status = self.status.value 82 83 settings = self.settings.to_dict() 84 85 account = self.account.to_dict() 86 87 status_message = self.status_message 88 89 tags = [] 90 for tags_item_data in self.tags: 91 tags_item = tags_item_data.to_dict() 92 tags.append(tags_item) 93 94 classification_ids = self.classification_ids 95 96 created_by = self.created_by 97 98 created_at = self.created_at.isoformat() 99 100 updated_at = self.updated_at.isoformat() 101 102 deployed_at: None | str | Unset 103 if isinstance(self.deployed_at, Unset): 104 deployed_at = UNSET 105 elif isinstance(self.deployed_at, datetime.datetime): 106 deployed_at = self.deployed_at.isoformat() 107 else: 108 deployed_at = self.deployed_at 109 110 field_dict: dict[str, Any] = {} 111 field_dict.update(self.additional_properties) 112 field_dict.update( 113 { 114 "id": id, 115 "name": name, 116 "description": description, 117 "billingAccountId": billing_account_id, 118 "contacts": contacts, 119 "organization": organization, 120 "status": status, 121 "settings": settings, 122 "account": account, 123 "statusMessage": status_message, 124 "tags": tags, 125 "classificationIds": classification_ids, 126 "createdBy": created_by, 127 "createdAt": created_at, 128 "updatedAt": updated_at, 129 } 130 ) 131 if deployed_at is not UNSET: 132 field_dict["deployedAt"] = deployed_at 133 134 return field_dict
136 @classmethod 137 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 138 from ..models.cloud_account import CloudAccount 139 from ..models.contact import Contact 140 from ..models.project_settings import ProjectSettings 141 from ..models.tag import Tag 142 143 d = dict(src_dict) 144 id = d.pop("id") 145 146 name = d.pop("name") 147 148 description = d.pop("description") 149 150 billing_account_id = d.pop("billingAccountId") 151 152 contacts = [] 153 _contacts = d.pop("contacts") 154 for contacts_item_data in _contacts: 155 contacts_item = Contact.from_dict(contacts_item_data) 156 157 contacts.append(contacts_item) 158 159 organization = d.pop("organization") 160 161 status = Status(d.pop("status")) 162 163 settings = ProjectSettings.from_dict(d.pop("settings")) 164 165 account = CloudAccount.from_dict(d.pop("account")) 166 167 status_message = d.pop("statusMessage") 168 169 tags = [] 170 _tags = d.pop("tags") 171 for tags_item_data in _tags: 172 tags_item = Tag.from_dict(tags_item_data) 173 174 tags.append(tags_item) 175 176 classification_ids = cast(list[str], d.pop("classificationIds")) 177 178 created_by = d.pop("createdBy") 179 180 created_at = isoparse(d.pop("createdAt")) 181 182 updated_at = isoparse(d.pop("updatedAt")) 183 184 def _parse_deployed_at(data: object) -> datetime.datetime | None | Unset: 185 if data is None: 186 return data 187 if isinstance(data, Unset): 188 return data 189 try: 190 if not isinstance(data, str): 191 raise TypeError() 192 deployed_at_type_0 = isoparse(data) 193 194 return deployed_at_type_0 195 except (TypeError, ValueError, AttributeError, KeyError): 196 pass 197 return cast(datetime.datetime | None | Unset, data) 198 199 deployed_at = _parse_deployed_at(d.pop("deployedAt", UNSET)) 200 201 project_detail = cls( 202 id=id, 203 name=name, 204 description=description, 205 billing_account_id=billing_account_id, 206 contacts=contacts, 207 organization=organization, 208 status=status, 209 settings=settings, 210 
account=account, 211 status_message=status_message, 212 tags=tags, 213 classification_ids=classification_ids, 214 created_by=created_by, 215 created_at=created_at, 216 updated_at=updated_at, 217 deployed_at=deployed_at, 218 ) 219 220 project_detail.additional_properties = d 221 return project_detail
@_attrs_define
class ProjectFileAccessRequest:
    """Request body for obtaining scoped file-access credentials on a project.

    Attributes:
        access_type (ProjectAccessType):
        dataset_id (None | str | Unset):
        token_lifetime_hours (int | None | Unset):
    """

    access_type: ProjectAccessType
    dataset_id: None | str | Unset = UNSET
    token_lifetime_hours: int | None | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-style dict; optional fields are omitted when unset
        (an explicit ``None`` is still emitted)."""
        field_dict: dict[str, Any] = dict(self.additional_properties)
        field_dict["accessType"] = self.access_type.value
        if not isinstance(self.dataset_id, Unset):
            field_dict["datasetId"] = self.dataset_id
        if not isinstance(self.token_lifetime_hours, Unset):
            field_dict["tokenLifetimeHours"] = self.token_lifetime_hours
        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a JSON-style dict; leftover keys are kept as additional_properties."""
        leftover = dict(src_dict)
        # datasetId / tokenLifetimeHours are pass-through unions
        # (None | value | Unset), so no parsing is needed.
        project_file_access_request = cls(
            access_type=ProjectAccessType(leftover.pop("accessType")),
            dataset_id=leftover.pop("datasetId", UNSET),
            token_lifetime_hours=leftover.pop("tokenLifetimeHours", UNSET),
        )
        project_file_access_request.additional_properties = leftover
        return project_file_access_request

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- access_type (ProjectAccessType):
- dataset_id (None | str | Unset):
- token_lifetime_hours (int | None | Unset):
26def __init__(self, access_type, dataset_id=attr_dict['dataset_id'].default, token_lifetime_hours=attr_dict['token_lifetime_hours'].default): 27 self.access_type = access_type 28 self.dataset_id = dataset_id 29 self.token_lifetime_hours = token_lifetime_hours 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectFileAccessRequest.
30 def to_dict(self) -> dict[str, Any]: 31 access_type = self.access_type.value 32 33 dataset_id: None | str | Unset 34 if isinstance(self.dataset_id, Unset): 35 dataset_id = UNSET 36 else: 37 dataset_id = self.dataset_id 38 39 token_lifetime_hours: int | None | Unset 40 if isinstance(self.token_lifetime_hours, Unset): 41 token_lifetime_hours = UNSET 42 else: 43 token_lifetime_hours = self.token_lifetime_hours 44 45 field_dict: dict[str, Any] = {} 46 field_dict.update(self.additional_properties) 47 field_dict.update( 48 { 49 "accessType": access_type, 50 } 51 ) 52 if dataset_id is not UNSET: 53 field_dict["datasetId"] = dataset_id 54 if token_lifetime_hours is not UNSET: 55 field_dict["tokenLifetimeHours"] = token_lifetime_hours 56 57 return field_dict
59 @classmethod 60 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 61 d = dict(src_dict) 62 access_type = ProjectAccessType(d.pop("accessType")) 63 64 def _parse_dataset_id(data: object) -> None | str | Unset: 65 if data is None: 66 return data 67 if isinstance(data, Unset): 68 return data 69 return cast(None | str | Unset, data) 70 71 dataset_id = _parse_dataset_id(d.pop("datasetId", UNSET)) 72 73 def _parse_token_lifetime_hours(data: object) -> int | None | Unset: 74 if data is None: 75 return data 76 if isinstance(data, Unset): 77 return data 78 return cast(int | None | Unset, data) 79 80 token_lifetime_hours = _parse_token_lifetime_hours(d.pop("tokenLifetimeHours", UNSET)) 81 82 project_file_access_request = cls( 83 access_type=access_type, 84 dataset_id=dataset_id, 85 token_lifetime_hours=token_lifetime_hours, 86 ) 87 88 project_file_access_request.additional_properties = d 89 return project_file_access_request
@_attrs_define
class ProjectInput:
    """
    Attributes:
        name (str):
        description (str):
        billing_account_id (str):
        settings (ProjectSettings):
        contacts (list[Contact]):
        account (CloudAccount | None | Unset):
        classification_ids (list[str] | None | Unset):
        tags (list[Tag] | None | Unset):
    """

    name: str
    description: str
    billing_account_id: str
    settings: ProjectSettings
    contacts: list[Contact]
    account: CloudAccount | None | Unset = UNSET
    classification_ids: list[str] | None | Unset = UNSET
    tags: list[Tag] | None | Unset = UNSET
    # Catch-all for keys not declared above; round-tripped by to_dict/from_dict.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize this model to a plain dict using camelCase JSON keys.

        Keys from ``additional_properties`` are written first, so declared
        fields override them on collision. Optional fields left as UNSET are
        omitted from the result.
        """
        # Deferred model import, as generated — presumably to avoid import
        # cycles between model modules; confirm against the generator output.
        from ..models.cloud_account import CloudAccount

        name = self.name

        description = self.description

        billing_account_id = self.billing_account_id

        settings = self.settings.to_dict()

        contacts = []
        for contacts_item_data in self.contacts:
            contacts_item = contacts_item_data.to_dict()
            contacts.append(contacts_item)

        # account is a union: Unset -> omitted later, CloudAccount -> nested
        # dict, anything else (None) passes through unchanged.
        account: dict[str, Any] | None | Unset
        if isinstance(self.account, Unset):
            account = UNSET
        elif isinstance(self.account, CloudAccount):
            account = self.account.to_dict()
        else:
            account = self.account

        # classification_ids needs no element conversion; both branches below
        # pass the value through (the list branch exists for symmetry with
        # the generator's other union serializers).
        classification_ids: list[str] | None | Unset
        if isinstance(self.classification_ids, Unset):
            classification_ids = UNSET
        elif isinstance(self.classification_ids, list):
            classification_ids = self.classification_ids

        else:
            classification_ids = self.classification_ids

        # tags: a list is serialized item by item; None passes through.
        tags: list[dict[str, Any]] | None | Unset
        if isinstance(self.tags, Unset):
            tags = UNSET
        elif isinstance(self.tags, list):
            tags = []
            for tags_type_0_item_data in self.tags:
                tags_type_0_item = tags_type_0_item_data.to_dict()
                tags.append(tags_type_0_item)

        else:
            tags = self.tags

        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "name": name,
                "description": description,
                "billingAccountId": billing_account_id,
                "settings": settings,
                "contacts": contacts,
            }
        )
        if account is not UNSET:
            field_dict["account"] = account
        if classification_ids is not UNSET:
            field_dict["classificationIds"] = classification_ids
        if tags is not UNSET:
            field_dict["tags"] = tags

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-style mapping.

        Known keys are popped off a copy of ``src_dict``; whatever remains is
        stored in ``additional_properties``. Union-typed fields fall back to
        returning the raw value unchanged when it cannot be decoded.
        """
        from ..models.cloud_account import CloudAccount
        from ..models.contact import Contact
        from ..models.project_settings import ProjectSettings
        from ..models.tag import Tag

        d = dict(src_dict)
        name = d.pop("name")

        description = d.pop("description")

        billing_account_id = d.pop("billingAccountId")

        settings = ProjectSettings.from_dict(d.pop("settings"))

        contacts = []
        _contacts = d.pop("contacts")
        for contacts_item_data in _contacts:
            contacts_item = Contact.from_dict(contacts_item_data)

            contacts.append(contacts_item)

        def _parse_account(data: object) -> CloudAccount | None | Unset:
            # None/Unset pass through; a dict is decoded as CloudAccount; any
            # other (unexpected) value is returned as-is via the no-op cast.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                account_type_1 = CloudAccount.from_dict(data)

                return account_type_1
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(CloudAccount | None | Unset, data)

        account = _parse_account(d.pop("account", UNSET))

        def _parse_classification_ids(data: object) -> list[str] | None | Unset:
            # None/Unset pass through; a list is accepted as-is (cast only).
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                classification_ids_type_0 = cast(list[str], data)

                return classification_ids_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(list[str] | None | Unset, data)

        classification_ids = _parse_classification_ids(d.pop("classificationIds", UNSET))

        def _parse_tags(data: object) -> list[Tag] | None | Unset:
            # None/Unset pass through; list items are decoded as Tag models.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                tags_type_0 = []
                _tags_type_0 = data
                for tags_type_0_item_data in _tags_type_0:
                    tags_type_0_item = Tag.from_dict(tags_type_0_item_data)

                    tags_type_0.append(tags_type_0_item)

                return tags_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(list[Tag] | None | Unset, data)

        tags = _parse_tags(d.pop("tags", UNSET))

        project_input = cls(
            name=name,
            description=description,
            billing_account_id=billing_account_id,
            settings=settings,
            contacts=contacts,
            account=account,
            classification_ids=classification_ids,
            tags=tags,
        )

        project_input.additional_properties = d
        return project_input

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) keys held by this model."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- description (str):
- billing_account_id (str):
- settings (ProjectSettings):
- contacts (list[Contact]):
- account (CloudAccount | None | Unset):
- classification_ids (list[str] | None | Unset):
- tags (list[Tag] | None | Unset):
31def __init__(self, name, description, billing_account_id, settings, contacts, account=attr_dict['account'].default, classification_ids=attr_dict['classification_ids'].default, tags=attr_dict['tags'].default): 32 self.name = name 33 self.description = description 34 self.billing_account_id = billing_account_id 35 self.settings = settings 36 self.contacts = contacts 37 self.account = account 38 self.classification_ids = classification_ids 39 self.tags = tags 40 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectInput.
46 def to_dict(self) -> dict[str, Any]: 47 from ..models.cloud_account import CloudAccount 48 49 name = self.name 50 51 description = self.description 52 53 billing_account_id = self.billing_account_id 54 55 settings = self.settings.to_dict() 56 57 contacts = [] 58 for contacts_item_data in self.contacts: 59 contacts_item = contacts_item_data.to_dict() 60 contacts.append(contacts_item) 61 62 account: dict[str, Any] | None | Unset 63 if isinstance(self.account, Unset): 64 account = UNSET 65 elif isinstance(self.account, CloudAccount): 66 account = self.account.to_dict() 67 else: 68 account = self.account 69 70 classification_ids: list[str] | None | Unset 71 if isinstance(self.classification_ids, Unset): 72 classification_ids = UNSET 73 elif isinstance(self.classification_ids, list): 74 classification_ids = self.classification_ids 75 76 else: 77 classification_ids = self.classification_ids 78 79 tags: list[dict[str, Any]] | None | Unset 80 if isinstance(self.tags, Unset): 81 tags = UNSET 82 elif isinstance(self.tags, list): 83 tags = [] 84 for tags_type_0_item_data in self.tags: 85 tags_type_0_item = tags_type_0_item_data.to_dict() 86 tags.append(tags_type_0_item) 87 88 else: 89 tags = self.tags 90 91 field_dict: dict[str, Any] = {} 92 field_dict.update(self.additional_properties) 93 field_dict.update( 94 { 95 "name": name, 96 "description": description, 97 "billingAccountId": billing_account_id, 98 "settings": settings, 99 "contacts": contacts, 100 } 101 ) 102 if account is not UNSET: 103 field_dict["account"] = account 104 if classification_ids is not UNSET: 105 field_dict["classificationIds"] = classification_ids 106 if tags is not UNSET: 107 field_dict["tags"] = tags 108 109 return field_dict
111 @classmethod 112 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 113 from ..models.cloud_account import CloudAccount 114 from ..models.contact import Contact 115 from ..models.project_settings import ProjectSettings 116 from ..models.tag import Tag 117 118 d = dict(src_dict) 119 name = d.pop("name") 120 121 description = d.pop("description") 122 123 billing_account_id = d.pop("billingAccountId") 124 125 settings = ProjectSettings.from_dict(d.pop("settings")) 126 127 contacts = [] 128 _contacts = d.pop("contacts") 129 for contacts_item_data in _contacts: 130 contacts_item = Contact.from_dict(contacts_item_data) 131 132 contacts.append(contacts_item) 133 134 def _parse_account(data: object) -> CloudAccount | None | Unset: 135 if data is None: 136 return data 137 if isinstance(data, Unset): 138 return data 139 try: 140 if not isinstance(data, dict): 141 raise TypeError() 142 account_type_1 = CloudAccount.from_dict(data) 143 144 return account_type_1 145 except (TypeError, ValueError, AttributeError, KeyError): 146 pass 147 return cast(CloudAccount | None | Unset, data) 148 149 account = _parse_account(d.pop("account", UNSET)) 150 151 def _parse_classification_ids(data: object) -> list[str] | None | Unset: 152 if data is None: 153 return data 154 if isinstance(data, Unset): 155 return data 156 try: 157 if not isinstance(data, list): 158 raise TypeError() 159 classification_ids_type_0 = cast(list[str], data) 160 161 return classification_ids_type_0 162 except (TypeError, ValueError, AttributeError, KeyError): 163 pass 164 return cast(list[str] | None | Unset, data) 165 166 classification_ids = _parse_classification_ids(d.pop("classificationIds", UNSET)) 167 168 def _parse_tags(data: object) -> list[Tag] | None | Unset: 169 if data is None: 170 return data 171 if isinstance(data, Unset): 172 return data 173 try: 174 if not isinstance(data, list): 175 raise TypeError() 176 tags_type_0 = [] 177 _tags_type_0 = data 178 for tags_type_0_item_data in 
_tags_type_0: 179 tags_type_0_item = Tag.from_dict(tags_type_0_item_data) 180 181 tags_type_0.append(tags_type_0_item) 182 183 return tags_type_0 184 except (TypeError, ValueError, AttributeError, KeyError): 185 pass 186 return cast(list[Tag] | None | Unset, data) 187 188 tags = _parse_tags(d.pop("tags", UNSET)) 189 190 project_input = cls( 191 name=name, 192 description=description, 193 billing_account_id=billing_account_id, 194 settings=settings, 195 contacts=contacts, 196 account=account, 197 classification_ids=classification_ids, 198 tags=tags, 199 ) 200 201 project_input.additional_properties = d 202 return project_input
@_attrs_define
class ProjectMetrics:
    """
    Attributes:
        project_id (str):
        costs (list[MetricRecord] | Unset): Costs by service by month Example: [{'date': datetime.date(2022, 11, 1),
            'unit': '$', 'service': {'Other': 26.47, 'EC2 - Other': 3.66, 'Amazon Elastic Compute Cloud - Compute': 140.59,
            'Amazon Simple Storage Service': 24.91, 'AmazonCloudWatch': 2.09}}].
        storage_metrics (list[MetricRecord] | Unset): Storage usage by tier by day Example: [{'date':
            datetime.date(2023, 12, 12), 'unit': 'GB', 'service': {'IntelligentTieringAIAStorage': 4198.95,
            'IntelligentTieringFAStorage': 1516.48, 'StandardStorage': 1.9, 'IntelligentTieringIAStorage': 2154.6}}].
    """

    project_id: str
    costs: list[MetricRecord] | Unset = UNSET
    storage_metrics: list[MetricRecord] | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; unset metric lists are omitted."""
        result: dict[str, Any] = dict(self.additional_properties)
        result["projectId"] = self.project_id

        if not isinstance(self.costs, Unset):
            result["costs"] = [record.to_dict() for record in self.costs]

        if not isinstance(self.storage_metrics, Unset):
            result["storageMetrics"] = [record.to_dict() for record in self.storage_metrics]

        return result

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-style mapping.

        Known keys are popped off a copy of the mapping; leftovers are kept in
        ``additional_properties``.
        """
        from ..models.metric_record import MetricRecord

        payload = dict(src_dict)
        project_id = payload.pop("projectId")

        raw_costs = payload.pop("costs", UNSET)
        costs: list[MetricRecord] | Unset = UNSET
        if raw_costs is not UNSET:
            costs = [MetricRecord.from_dict(item) for item in raw_costs]

        raw_storage = payload.pop("storageMetrics", UNSET)
        storage_metrics: list[MetricRecord] | Unset = UNSET
        if raw_storage is not UNSET:
            storage_metrics = [MetricRecord.from_dict(item) for item in raw_storage]

        metrics = cls(
            project_id=project_id,
            costs=costs,
            storage_metrics=storage_metrics,
        )
        metrics.additional_properties = payload
        return metrics

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) keys held by this model."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- project_id (str):
- costs (list[MetricRecord] | Unset): Costs by service by month Example: [{'date': datetime.date(2022, 11, 1), 'unit': '$', 'service': {'Other': 26.47, 'EC2 - Other': 3.66, 'Amazon Elastic Compute Cloud - Compute': 140.59, 'Amazon Simple Storage Service': 24.91, 'AmazonCloudWatch': 2.09}}].
- storage_metrics (list[MetricRecord] | Unset): Storage usage by tier by day Example: [{'date': datetime.date(2023, 12, 12), 'unit': 'GB', 'service': {'IntelligentTieringAIAStorage': 4198.95, 'IntelligentTieringFAStorage': 1516.48, 'StandardStorage': 1.9, 'IntelligentTieringIAStorage': 2154.6}}].
26def __init__(self, project_id, costs=attr_dict['costs'].default, storage_metrics=attr_dict['storage_metrics'].default): 27 self.project_id = project_id 28 self.costs = costs 29 self.storage_metrics = storage_metrics 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectMetrics.
37 def to_dict(self) -> dict[str, Any]: 38 project_id = self.project_id 39 40 costs: list[dict[str, Any]] | Unset = UNSET 41 if not isinstance(self.costs, Unset): 42 costs = [] 43 for costs_item_data in self.costs: 44 costs_item = costs_item_data.to_dict() 45 costs.append(costs_item) 46 47 storage_metrics: list[dict[str, Any]] | Unset = UNSET 48 if not isinstance(self.storage_metrics, Unset): 49 storage_metrics = [] 50 for storage_metrics_item_data in self.storage_metrics: 51 storage_metrics_item = storage_metrics_item_data.to_dict() 52 storage_metrics.append(storage_metrics_item) 53 54 field_dict: dict[str, Any] = {} 55 field_dict.update(self.additional_properties) 56 field_dict.update( 57 { 58 "projectId": project_id, 59 } 60 ) 61 if costs is not UNSET: 62 field_dict["costs"] = costs 63 if storage_metrics is not UNSET: 64 field_dict["storageMetrics"] = storage_metrics 65 66 return field_dict
68 @classmethod 69 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 70 from ..models.metric_record import MetricRecord 71 72 d = dict(src_dict) 73 project_id = d.pop("projectId") 74 75 _costs = d.pop("costs", UNSET) 76 costs: list[MetricRecord] | Unset = UNSET 77 if _costs is not UNSET: 78 costs = [] 79 for costs_item_data in _costs: 80 costs_item = MetricRecord.from_dict(costs_item_data) 81 82 costs.append(costs_item) 83 84 _storage_metrics = d.pop("storageMetrics", UNSET) 85 storage_metrics: list[MetricRecord] | Unset = UNSET 86 if _storage_metrics is not UNSET: 87 storage_metrics = [] 88 for storage_metrics_item_data in _storage_metrics: 89 storage_metrics_item = MetricRecord.from_dict(storage_metrics_item_data) 90 91 storage_metrics.append(storage_metrics_item) 92 93 project_metrics = cls( 94 project_id=project_id, 95 costs=costs, 96 storage_metrics=storage_metrics, 97 ) 98 99 project_metrics.additional_properties = d 100 return project_metrics
@_attrs_define
class ProjectRequest:
    """
    Attributes:
        name (str):
        description (str):
        classification_ids (list[str]):
        billing_info (str):
        admin_username (str):
        message (str):
    """

    name: str
    description: str
    classification_ids: list[str]
    billing_info: str
    admin_username: str
    message: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict with camelCase keys.

        Extra keys from ``additional_properties`` are written first, so the
        declared fields win on collision.
        """
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "name": self.name,
                "description": self.description,
                "classificationIds": self.classification_ids,
                "billingInfo": self.billing_info,
                "adminUsername": self.admin_username,
                "message": self.message,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-style mapping.

        All six fields are required; whatever keys remain after popping them
        are stored in ``additional_properties``.
        """
        payload = dict(src_dict)
        request = cls(
            name=payload.pop("name"),
            description=payload.pop("description"),
            classification_ids=cast(list[str], payload.pop("classificationIds")),
            billing_info=payload.pop("billingInfo"),
            admin_username=payload.pop("adminUsername"),
            message=payload.pop("message"),
        )
        request.additional_properties = payload
        return request

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) keys held by this model."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- description (str):
- classification_ids (list[str]):
- billing_info (str):
- admin_username (str):
- message (str):
29def __init__(self, name, description, classification_ids, billing_info, admin_username, message): 30 self.name = name 31 self.description = description 32 self.classification_ids = classification_ids 33 self.billing_info = billing_info 34 self.admin_username = admin_username 35 self.message = message 36 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectRequest.
33 def to_dict(self) -> dict[str, Any]: 34 name = self.name 35 36 description = self.description 37 38 classification_ids = self.classification_ids 39 40 billing_info = self.billing_info 41 42 admin_username = self.admin_username 43 44 message = self.message 45 46 field_dict: dict[str, Any] = {} 47 field_dict.update(self.additional_properties) 48 field_dict.update( 49 { 50 "name": name, 51 "description": description, 52 "classificationIds": classification_ids, 53 "billingInfo": billing_info, 54 "adminUsername": admin_username, 55 "message": message, 56 } 57 ) 58 59 return field_dict
61 @classmethod 62 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 63 d = dict(src_dict) 64 name = d.pop("name") 65 66 description = d.pop("description") 67 68 classification_ids = cast(list[str], d.pop("classificationIds")) 69 70 billing_info = d.pop("billingInfo") 71 72 admin_username = d.pop("adminUsername") 73 74 message = d.pop("message") 75 76 project_request = cls( 77 name=name, 78 description=description, 79 classification_ids=classification_ids, 80 billing_info=billing_info, 81 admin_username=admin_username, 82 message=message, 83 ) 84 85 project_request.additional_properties = d 86 return project_request
26@_attrs_define 27class ProjectRequirement: 28 """ 29 Attributes: 30 id (str): The unique identifier for the requirement 31 name (str): The name of the requirement 32 description (str): A brief description of the requirement 33 type_ (GovernanceType): The types of governance requirements that can be enforced 34 path (str): S3 prefix where the main file for the requirement is saved 35 supplemental_path (str): S3 prefix where supplemental files for the requirement are saved 36 scope (GovernanceScope): The levels at which governance requirements can be enforced 37 contacts (list[GovernanceContact]): The governance contacts assigned to the requirement. 38 is_enacted (bool): Whether the requirement is past the enactment date 39 is_project_configured (bool): A requirement is project configured if it was created by the tenant but needs a 40 file uploaded by the project 41 is_fulfilled (bool): Whether the current user has fulfilled the requirement for this project 42 acceptance (GovernanceScope | None | Unset): Specifies the level at which it is satisfied 43 enactment_date (datetime.datetime | None | Unset): The date of enactment for the requirement 44 expiration_type (GovernanceExpiryType | Unset): The expiry conditions that can be applied to governance 45 requirements. 46 expiration_days_after_completion (int | None | Unset): The number of days for a relative to completion 47 expiration 48 expiration_date (datetime.datetime | None | Unset): The date of expiration for the requirement 49 supplemental_docs (list[GovernanceFile] | None | Unset): Optional files with extra information, e.g. templates 50 for documents, links, etc 51 file (GovernanceFile | None | Unset): 52 authorship (GovernanceScope | None | Unset): Who needs to supply the agreement document 53 verification_method (GovernanceTrainingVerification | None | Unset): The value indicating how the completion of 54 the training is verified. 
55 fulfillment_id (None | str | Unset): The id for the requirement fulfillment 56 fulfillment_date (datetime.datetime | None | Unset): The date the requirement was fulfilled by the user 57 fulfillment_file (None | str | Unset): The optional file uploaded to fulfill the requirement 58 fulfillment_path (None | str | Unset): The path to the optional fulfillment file 59 requires_user_fulfillment (bool | Unset): Whether this requirement requires the user to fulfill (it is active, 60 requires fulfillment, and user has not fulfilled 61 """ 62 63 id: str 64 name: str 65 description: str 66 type_: GovernanceType 67 path: str 68 supplemental_path: str 69 scope: GovernanceScope 70 contacts: list[GovernanceContact] 71 is_enacted: bool 72 is_project_configured: bool 73 is_fulfilled: bool 74 acceptance: GovernanceScope | None | Unset = UNSET 75 enactment_date: datetime.datetime | None | Unset = UNSET 76 expiration_type: GovernanceExpiryType | Unset = UNSET 77 expiration_days_after_completion: int | None | Unset = UNSET 78 expiration_date: datetime.datetime | None | Unset = UNSET 79 supplemental_docs: list[GovernanceFile] | None | Unset = UNSET 80 file: GovernanceFile | None | Unset = UNSET 81 authorship: GovernanceScope | None | Unset = UNSET 82 verification_method: GovernanceTrainingVerification | None | Unset = UNSET 83 fulfillment_id: None | str | Unset = UNSET 84 fulfillment_date: datetime.datetime | None | Unset = UNSET 85 fulfillment_file: None | str | Unset = UNSET 86 fulfillment_path: None | str | Unset = UNSET 87 requires_user_fulfillment: bool | Unset = UNSET 88 additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) 89 90 def to_dict(self) -> dict[str, Any]: 91 from ..models.governance_file import GovernanceFile 92 93 id = self.id 94 95 name = self.name 96 97 description = self.description 98 99 type_ = self.type_.value 100 101 path = self.path 102 103 supplemental_path = self.supplemental_path 104 105 scope = self.scope.value 106 107 contacts = 
[] 108 for contacts_item_data in self.contacts: 109 contacts_item = contacts_item_data.to_dict() 110 contacts.append(contacts_item) 111 112 is_enacted = self.is_enacted 113 114 is_project_configured = self.is_project_configured 115 116 is_fulfilled = self.is_fulfilled 117 118 acceptance: None | str | Unset 119 if isinstance(self.acceptance, Unset): 120 acceptance = UNSET 121 elif isinstance(self.acceptance, GovernanceScope): 122 acceptance = self.acceptance.value 123 else: 124 acceptance = self.acceptance 125 126 enactment_date: None | str | Unset 127 if isinstance(self.enactment_date, Unset): 128 enactment_date = UNSET 129 elif isinstance(self.enactment_date, datetime.datetime): 130 enactment_date = self.enactment_date.isoformat() 131 else: 132 enactment_date = self.enactment_date 133 134 expiration_type: str | Unset = UNSET 135 if not isinstance(self.expiration_type, Unset): 136 expiration_type = self.expiration_type.value 137 138 expiration_days_after_completion: int | None | Unset 139 if isinstance(self.expiration_days_after_completion, Unset): 140 expiration_days_after_completion = UNSET 141 else: 142 expiration_days_after_completion = self.expiration_days_after_completion 143 144 expiration_date: None | str | Unset 145 if isinstance(self.expiration_date, Unset): 146 expiration_date = UNSET 147 elif isinstance(self.expiration_date, datetime.datetime): 148 expiration_date = self.expiration_date.isoformat() 149 else: 150 expiration_date = self.expiration_date 151 152 supplemental_docs: list[dict[str, Any]] | None | Unset 153 if isinstance(self.supplemental_docs, Unset): 154 supplemental_docs = UNSET 155 elif isinstance(self.supplemental_docs, list): 156 supplemental_docs = [] 157 for supplemental_docs_type_0_item_data in self.supplemental_docs: 158 supplemental_docs_type_0_item = supplemental_docs_type_0_item_data.to_dict() 159 supplemental_docs.append(supplemental_docs_type_0_item) 160 161 else: 162 supplemental_docs = self.supplemental_docs 163 164 file: 
dict[str, Any] | None | Unset 165 if isinstance(self.file, Unset): 166 file = UNSET 167 elif isinstance(self.file, GovernanceFile): 168 file = self.file.to_dict() 169 else: 170 file = self.file 171 172 authorship: None | str | Unset 173 if isinstance(self.authorship, Unset): 174 authorship = UNSET 175 elif isinstance(self.authorship, GovernanceScope): 176 authorship = self.authorship.value 177 else: 178 authorship = self.authorship 179 180 verification_method: None | str | Unset 181 if isinstance(self.verification_method, Unset): 182 verification_method = UNSET 183 elif isinstance(self.verification_method, GovernanceTrainingVerification): 184 verification_method = self.verification_method.value 185 else: 186 verification_method = self.verification_method 187 188 fulfillment_id: None | str | Unset 189 if isinstance(self.fulfillment_id, Unset): 190 fulfillment_id = UNSET 191 else: 192 fulfillment_id = self.fulfillment_id 193 194 fulfillment_date: None | str | Unset 195 if isinstance(self.fulfillment_date, Unset): 196 fulfillment_date = UNSET 197 elif isinstance(self.fulfillment_date, datetime.datetime): 198 fulfillment_date = self.fulfillment_date.isoformat() 199 else: 200 fulfillment_date = self.fulfillment_date 201 202 fulfillment_file: None | str | Unset 203 if isinstance(self.fulfillment_file, Unset): 204 fulfillment_file = UNSET 205 else: 206 fulfillment_file = self.fulfillment_file 207 208 fulfillment_path: None | str | Unset 209 if isinstance(self.fulfillment_path, Unset): 210 fulfillment_path = UNSET 211 else: 212 fulfillment_path = self.fulfillment_path 213 214 requires_user_fulfillment = self.requires_user_fulfillment 215 216 field_dict: dict[str, Any] = {} 217 field_dict.update(self.additional_properties) 218 field_dict.update( 219 { 220 "id": id, 221 "name": name, 222 "description": description, 223 "type": type_, 224 "path": path, 225 "supplementalPath": supplemental_path, 226 "scope": scope, 227 "contacts": contacts, 228 "isEnacted": is_enacted, 229 
"isProjectConfigured": is_project_configured, 230 "isFulfilled": is_fulfilled, 231 } 232 ) 233 if acceptance is not UNSET: 234 field_dict["acceptance"] = acceptance 235 if enactment_date is not UNSET: 236 field_dict["enactmentDate"] = enactment_date 237 if expiration_type is not UNSET: 238 field_dict["expirationType"] = expiration_type 239 if expiration_days_after_completion is not UNSET: 240 field_dict["expirationDaysAfterCompletion"] = expiration_days_after_completion 241 if expiration_date is not UNSET: 242 field_dict["expirationDate"] = expiration_date 243 if supplemental_docs is not UNSET: 244 field_dict["supplementalDocs"] = supplemental_docs 245 if file is not UNSET: 246 field_dict["file"] = file 247 if authorship is not UNSET: 248 field_dict["authorship"] = authorship 249 if verification_method is not UNSET: 250 field_dict["verificationMethod"] = verification_method 251 if fulfillment_id is not UNSET: 252 field_dict["fulfillmentId"] = fulfillment_id 253 if fulfillment_date is not UNSET: 254 field_dict["fulfillmentDate"] = fulfillment_date 255 if fulfillment_file is not UNSET: 256 field_dict["fulfillmentFile"] = fulfillment_file 257 if fulfillment_path is not UNSET: 258 field_dict["fulfillmentPath"] = fulfillment_path 259 if requires_user_fulfillment is not UNSET: 260 field_dict["requiresUserFulfillment"] = requires_user_fulfillment 261 262 return field_dict 263 264 @classmethod 265 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 266 from ..models.governance_contact import GovernanceContact 267 from ..models.governance_file import GovernanceFile 268 269 d = dict(src_dict) 270 id = d.pop("id") 271 272 name = d.pop("name") 273 274 description = d.pop("description") 275 276 type_ = GovernanceType(d.pop("type")) 277 278 path = d.pop("path") 279 280 supplemental_path = d.pop("supplementalPath") 281 282 scope = GovernanceScope(d.pop("scope")) 283 284 contacts = [] 285 _contacts = d.pop("contacts") 286 for contacts_item_data in _contacts: 287 
contacts_item = GovernanceContact.from_dict(contacts_item_data) 288 289 contacts.append(contacts_item) 290 291 is_enacted = d.pop("isEnacted") 292 293 is_project_configured = d.pop("isProjectConfigured") 294 295 is_fulfilled = d.pop("isFulfilled") 296 297 def _parse_acceptance(data: object) -> GovernanceScope | None | Unset: 298 if data is None: 299 return data 300 if isinstance(data, Unset): 301 return data 302 try: 303 if not isinstance(data, str): 304 raise TypeError() 305 acceptance_type_1 = GovernanceScope(data) 306 307 return acceptance_type_1 308 except (TypeError, ValueError, AttributeError, KeyError): 309 pass 310 return cast(GovernanceScope | None | Unset, data) 311 312 acceptance = _parse_acceptance(d.pop("acceptance", UNSET)) 313 314 def _parse_enactment_date(data: object) -> datetime.datetime | None | Unset: 315 if data is None: 316 return data 317 if isinstance(data, Unset): 318 return data 319 try: 320 if not isinstance(data, str): 321 raise TypeError() 322 enactment_date_type_0 = isoparse(data) 323 324 return enactment_date_type_0 325 except (TypeError, ValueError, AttributeError, KeyError): 326 pass 327 return cast(datetime.datetime | None | Unset, data) 328 329 enactment_date = _parse_enactment_date(d.pop("enactmentDate", UNSET)) 330 331 _expiration_type = d.pop("expirationType", UNSET) 332 expiration_type: GovernanceExpiryType | Unset 333 if isinstance(_expiration_type, Unset): 334 expiration_type = UNSET 335 else: 336 expiration_type = GovernanceExpiryType(_expiration_type) 337 338 def _parse_expiration_days_after_completion(data: object) -> int | None | Unset: 339 if data is None: 340 return data 341 if isinstance(data, Unset): 342 return data 343 return cast(int | None | Unset, data) 344 345 expiration_days_after_completion = _parse_expiration_days_after_completion( 346 d.pop("expirationDaysAfterCompletion", UNSET) 347 ) 348 349 def _parse_expiration_date(data: object) -> datetime.datetime | None | Unset: 350 if data is None: 351 return data 
352 if isinstance(data, Unset): 353 return data 354 try: 355 if not isinstance(data, str): 356 raise TypeError() 357 expiration_date_type_0 = isoparse(data) 358 359 return expiration_date_type_0 360 except (TypeError, ValueError, AttributeError, KeyError): 361 pass 362 return cast(datetime.datetime | None | Unset, data) 363 364 expiration_date = _parse_expiration_date(d.pop("expirationDate", UNSET)) 365 366 def _parse_supplemental_docs(data: object) -> list[GovernanceFile] | None | Unset: 367 if data is None: 368 return data 369 if isinstance(data, Unset): 370 return data 371 try: 372 if not isinstance(data, list): 373 raise TypeError() 374 supplemental_docs_type_0 = [] 375 _supplemental_docs_type_0 = data 376 for supplemental_docs_type_0_item_data in _supplemental_docs_type_0: 377 supplemental_docs_type_0_item = GovernanceFile.from_dict(supplemental_docs_type_0_item_data) 378 379 supplemental_docs_type_0.append(supplemental_docs_type_0_item) 380 381 return supplemental_docs_type_0 382 except (TypeError, ValueError, AttributeError, KeyError): 383 pass 384 return cast(list[GovernanceFile] | None | Unset, data) 385 386 supplemental_docs = _parse_supplemental_docs(d.pop("supplementalDocs", UNSET)) 387 388 def _parse_file(data: object) -> GovernanceFile | None | Unset: 389 if data is None: 390 return data 391 if isinstance(data, Unset): 392 return data 393 try: 394 if not isinstance(data, dict): 395 raise TypeError() 396 file_type_1 = GovernanceFile.from_dict(data) 397 398 return file_type_1 399 except (TypeError, ValueError, AttributeError, KeyError): 400 pass 401 return cast(GovernanceFile | None | Unset, data) 402 403 file = _parse_file(d.pop("file", UNSET)) 404 405 def _parse_authorship(data: object) -> GovernanceScope | None | Unset: 406 if data is None: 407 return data 408 if isinstance(data, Unset): 409 return data 410 try: 411 if not isinstance(data, str): 412 raise TypeError() 413 authorship_type_1 = GovernanceScope(data) 414 415 return authorship_type_1 416 
except (TypeError, ValueError, AttributeError, KeyError): 417 pass 418 return cast(GovernanceScope | None | Unset, data) 419 420 authorship = _parse_authorship(d.pop("authorship", UNSET)) 421 422 def _parse_verification_method(data: object) -> GovernanceTrainingVerification | None | Unset: 423 if data is None: 424 return data 425 if isinstance(data, Unset): 426 return data 427 try: 428 if not isinstance(data, str): 429 raise TypeError() 430 verification_method_type_1 = GovernanceTrainingVerification(data) 431 432 return verification_method_type_1 433 except (TypeError, ValueError, AttributeError, KeyError): 434 pass 435 return cast(GovernanceTrainingVerification | None | Unset, data) 436 437 verification_method = _parse_verification_method(d.pop("verificationMethod", UNSET)) 438 439 def _parse_fulfillment_id(data: object) -> None | str | Unset: 440 if data is None: 441 return data 442 if isinstance(data, Unset): 443 return data 444 return cast(None | str | Unset, data) 445 446 fulfillment_id = _parse_fulfillment_id(d.pop("fulfillmentId", UNSET)) 447 448 def _parse_fulfillment_date(data: object) -> datetime.datetime | None | Unset: 449 if data is None: 450 return data 451 if isinstance(data, Unset): 452 return data 453 try: 454 if not isinstance(data, str): 455 raise TypeError() 456 fulfillment_date_type_0 = isoparse(data) 457 458 return fulfillment_date_type_0 459 except (TypeError, ValueError, AttributeError, KeyError): 460 pass 461 return cast(datetime.datetime | None | Unset, data) 462 463 fulfillment_date = _parse_fulfillment_date(d.pop("fulfillmentDate", UNSET)) 464 465 def _parse_fulfillment_file(data: object) -> None | str | Unset: 466 if data is None: 467 return data 468 if isinstance(data, Unset): 469 return data 470 return cast(None | str | Unset, data) 471 472 fulfillment_file = _parse_fulfillment_file(d.pop("fulfillmentFile", UNSET)) 473 474 def _parse_fulfillment_path(data: object) -> None | str | Unset: 475 if data is None: 476 return data 477 if 
isinstance(data, Unset): 478 return data 479 return cast(None | str | Unset, data) 480 481 fulfillment_path = _parse_fulfillment_path(d.pop("fulfillmentPath", UNSET)) 482 483 requires_user_fulfillment = d.pop("requiresUserFulfillment", UNSET) 484 485 project_requirement = cls( 486 id=id, 487 name=name, 488 description=description, 489 type_=type_, 490 path=path, 491 supplemental_path=supplemental_path, 492 scope=scope, 493 contacts=contacts, 494 is_enacted=is_enacted, 495 is_project_configured=is_project_configured, 496 is_fulfilled=is_fulfilled, 497 acceptance=acceptance, 498 enactment_date=enactment_date, 499 expiration_type=expiration_type, 500 expiration_days_after_completion=expiration_days_after_completion, 501 expiration_date=expiration_date, 502 supplemental_docs=supplemental_docs, 503 file=file, 504 authorship=authorship, 505 verification_method=verification_method, 506 fulfillment_id=fulfillment_id, 507 fulfillment_date=fulfillment_date, 508 fulfillment_file=fulfillment_file, 509 fulfillment_path=fulfillment_path, 510 requires_user_fulfillment=requires_user_fulfillment, 511 ) 512 513 project_requirement.additional_properties = d 514 return project_requirement 515 516 @property 517 def additional_keys(self) -> list[str]: 518 return list(self.additional_properties.keys()) 519 520 def __getitem__(self, key: str) -> Any: 521 return self.additional_properties[key] 522 523 def __setitem__(self, key: str, value: Any) -> None: 524 self.additional_properties[key] = value 525 526 def __delitem__(self, key: str) -> None: 527 del self.additional_properties[key] 528 529 def __contains__(self, key: str) -> bool: 530 return key in self.additional_properties
Attributes:
- id (str): The unique identifier for the requirement
- name (str): The name of the requirement
- description (str): A brief description of the requirement
- type_ (GovernanceType): The types of governance requirements that can be enforced
- path (str): S3 prefix where the main file for the requirement is saved
- supplemental_path (str): S3 prefix where supplemental files for the requirement are saved
- scope (GovernanceScope): The levels at which governance requirements can be enforced
- contacts (list[GovernanceContact]): The governance contacts assigned to the requirement.
- is_enacted (bool): Whether the requirement is past the enactment date
- is_project_configured (bool): A requirement is project configured if it was created by the tenant but needs a file uploaded by the project
- is_fulfilled (bool): Whether the current user has fulfilled the requirement for this project
- acceptance (GovernanceScope | None | Unset): Specifies the level at which the requirement is satisfied
- enactment_date (datetime.datetime | None | Unset): The date of enactment for the requirement
- expiration_type (GovernanceExpiryType | Unset): The expiry conditions that can be applied to governance requirements.
- expiration_days_after_completion (int | None | Unset): The number of days for a relative to completion expiration
- expiration_date (datetime.datetime | None | Unset): The date of expiration for the requirement
- supplemental_docs (list[GovernanceFile] | None | Unset): Optional files with extra information, e.g. templates for documents, links, etc
- file (GovernanceFile | None | Unset):
- authorship (GovernanceScope | None | Unset): Who needs to supply the agreement document
- verification_method (GovernanceTrainingVerification | None | Unset): The value indicating how the completion of the training is verified.
- fulfillment_id (None | str | Unset): The id for the requirement fulfillment
- fulfillment_date (datetime.datetime | None | Unset): The date the requirement was fulfilled by the user
- fulfillment_file (None | str | Unset): The optional file uploaded to fulfill the requirement
- fulfillment_path (None | str | Unset): The path to the optional fulfillment file
- requires_user_fulfillment (bool | Unset): Whether this requirement requires the user to fulfill it (it is active, requires fulfillment, and the user has not yet fulfilled it)
def __init__(self, id, name, description, type_, path, supplemental_path, scope, contacts, is_enacted, is_project_configured, is_fulfilled, acceptance=attr_dict['acceptance'].default, enactment_date=attr_dict['enactment_date'].default, expiration_type=attr_dict['expiration_type'].default, expiration_days_after_completion=attr_dict['expiration_days_after_completion'].default, expiration_date=attr_dict['expiration_date'].default, supplemental_docs=attr_dict['supplemental_docs'].default, file=attr_dict['file'].default, authorship=attr_dict['authorship'].default, verification_method=attr_dict['verification_method'].default, fulfillment_id=attr_dict['fulfillment_id'].default, fulfillment_date=attr_dict['fulfillment_date'].default, fulfillment_file=attr_dict['fulfillment_file'].default, fulfillment_path=attr_dict['fulfillment_path'].default, requires_user_fulfillment=attr_dict['requires_user_fulfillment'].default):
    """Method generated by attrs for class ProjectRequirement.

    Required fields are positional; optional fields default to the
    attrs-declared defaults (typically UNSET sentinels) pulled from
    ``attr_dict``.
    """
    # NOTE(review): `attr_dict` and `__attr_factory_additional_properties`
    # are names synthesized by the attrs code generator; they are not
    # defined in this chunk of the file.
    self.id = id
    self.name = name
    self.description = description
    self.type_ = type_
    self.path = path
    self.supplemental_path = supplemental_path
    self.scope = scope
    self.contacts = contacts
    self.is_enacted = is_enacted
    self.is_project_configured = is_project_configured
    self.is_fulfilled = is_fulfilled
    self.acceptance = acceptance
    self.enactment_date = enactment_date
    self.expiration_type = expiration_type
    self.expiration_days_after_completion = expiration_days_after_completion
    self.expiration_date = expiration_date
    self.supplemental_docs = supplemental_docs
    self.file = file
    self.authorship = authorship
    self.verification_method = verification_method
    self.fulfillment_id = fulfillment_id
    self.fulfillment_date = fulfillment_date
    self.fulfillment_file = fulfillment_file
    self.fulfillment_path = fulfillment_path
    # Holds any extra keys not declared on the model (factory creates a dict).
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectRequirement.
def to_dict(self) -> dict[str, Any]:
    """Serialize this ProjectRequirement into a JSON-ready dict.

    Required attributes are always emitted under their camelCase API
    names; optional attributes are emitted only when they are not the
    UNSET sentinel. ``additional_properties`` entries are inserted first,
    so declared fields overwrite any colliding extra keys.
    """
    # Imported locally (not at module top) to avoid circular imports
    # between generated model modules.
    from ..models.governance_file import GovernanceFile

    id = self.id

    name = self.name

    description = self.description

    # Enum-typed fields serialize as their raw string values.
    type_ = self.type_.value

    path = self.path

    supplemental_path = self.supplemental_path

    scope = self.scope.value

    # Nested model lists serialize item-by-item via each item's to_dict().
    contacts = []
    for contacts_item_data in self.contacts:
        contacts_item = contacts_item_data.to_dict()
        contacts.append(contacts_item)

    is_enacted = self.is_enacted

    is_project_configured = self.is_project_configured

    is_fulfilled = self.is_fulfilled

    # Union-typed optionals: UNSET passes through, a model/enum/datetime
    # value is converted, and anything else (e.g. None) is kept as-is.
    acceptance: None | str | Unset
    if isinstance(self.acceptance, Unset):
        acceptance = UNSET
    elif isinstance(self.acceptance, GovernanceScope):
        acceptance = self.acceptance.value
    else:
        acceptance = self.acceptance

    enactment_date: None | str | Unset
    if isinstance(self.enactment_date, Unset):
        enactment_date = UNSET
    elif isinstance(self.enactment_date, datetime.datetime):
        # Datetimes are emitted as ISO-8601 strings.
        enactment_date = self.enactment_date.isoformat()
    else:
        enactment_date = self.enactment_date

    expiration_type: str | Unset = UNSET
    if not isinstance(self.expiration_type, Unset):
        expiration_type = self.expiration_type.value

    expiration_days_after_completion: int | None | Unset
    if isinstance(self.expiration_days_after_completion, Unset):
        expiration_days_after_completion = UNSET
    else:
        expiration_days_after_completion = self.expiration_days_after_completion

    expiration_date: None | str | Unset
    if isinstance(self.expiration_date, Unset):
        expiration_date = UNSET
    elif isinstance(self.expiration_date, datetime.datetime):
        expiration_date = self.expiration_date.isoformat()
    else:
        expiration_date = self.expiration_date

    supplemental_docs: list[dict[str, Any]] | None | Unset
    if isinstance(self.supplemental_docs, Unset):
        supplemental_docs = UNSET
    elif isinstance(self.supplemental_docs, list):
        supplemental_docs = []
        for supplemental_docs_type_0_item_data in self.supplemental_docs:
            supplemental_docs_type_0_item = supplemental_docs_type_0_item_data.to_dict()
            supplemental_docs.append(supplemental_docs_type_0_item)

    else:
        supplemental_docs = self.supplemental_docs

    file: dict[str, Any] | None | Unset
    if isinstance(self.file, Unset):
        file = UNSET
    elif isinstance(self.file, GovernanceFile):
        file = self.file.to_dict()
    else:
        file = self.file

    authorship: None | str | Unset
    if isinstance(self.authorship, Unset):
        authorship = UNSET
    elif isinstance(self.authorship, GovernanceScope):
        authorship = self.authorship.value
    else:
        authorship = self.authorship

    verification_method: None | str | Unset
    if isinstance(self.verification_method, Unset):
        verification_method = UNSET
    elif isinstance(self.verification_method, GovernanceTrainingVerification):
        verification_method = self.verification_method.value
    else:
        verification_method = self.verification_method

    fulfillment_id: None | str | Unset
    if isinstance(self.fulfillment_id, Unset):
        fulfillment_id = UNSET
    else:
        fulfillment_id = self.fulfillment_id

    fulfillment_date: None | str | Unset
    if isinstance(self.fulfillment_date, Unset):
        fulfillment_date = UNSET
    elif isinstance(self.fulfillment_date, datetime.datetime):
        fulfillment_date = self.fulfillment_date.isoformat()
    else:
        fulfillment_date = self.fulfillment_date

    fulfillment_file: None | str | Unset
    if isinstance(self.fulfillment_file, Unset):
        fulfillment_file = UNSET
    else:
        fulfillment_file = self.fulfillment_file

    fulfillment_path: None | str | Unset
    if isinstance(self.fulfillment_path, Unset):
        fulfillment_path = UNSET
    else:
        fulfillment_path = self.fulfillment_path

    requires_user_fulfillment = self.requires_user_fulfillment

    field_dict: dict[str, Any] = {}
    # Extra keys first: declared fields below take precedence on collision.
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "id": id,
            "name": name,
            "description": description,
            "type": type_,
            "path": path,
            "supplementalPath": supplemental_path,
            "scope": scope,
            "contacts": contacts,
            "isEnacted": is_enacted,
            "isProjectConfigured": is_project_configured,
            "isFulfilled": is_fulfilled,
        }
    )
    # Optional fields are only present in the payload when explicitly set.
    if acceptance is not UNSET:
        field_dict["acceptance"] = acceptance
    if enactment_date is not UNSET:
        field_dict["enactmentDate"] = enactment_date
    if expiration_type is not UNSET:
        field_dict["expirationType"] = expiration_type
    if expiration_days_after_completion is not UNSET:
        field_dict["expirationDaysAfterCompletion"] = expiration_days_after_completion
    if expiration_date is not UNSET:
        field_dict["expirationDate"] = expiration_date
    if supplemental_docs is not UNSET:
        field_dict["supplementalDocs"] = supplemental_docs
    if file is not UNSET:
        field_dict["file"] = file
    if authorship is not UNSET:
        field_dict["authorship"] = authorship
    if verification_method is not UNSET:
        field_dict["verificationMethod"] = verification_method
    if fulfillment_id is not UNSET:
        field_dict["fulfillmentId"] = fulfillment_id
    if fulfillment_date is not UNSET:
        field_dict["fulfillmentDate"] = fulfillment_date
    if fulfillment_file is not UNSET:
        field_dict["fulfillmentFile"] = fulfillment_file
    if fulfillment_path is not UNSET:
        field_dict["fulfillmentPath"] = fulfillment_path
    if requires_user_fulfillment is not UNSET:
        field_dict["requiresUserFulfillment"] = requires_user_fulfillment

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a ProjectRequirement from an API response mapping.

    Required keys are popped unconditionally (raising KeyError if absent);
    optional keys default to UNSET. The ``_parse_*`` helpers attempt each
    member of the field's union type in turn and fall back to returning
    the raw value on failure. Keys left over after parsing are stored in
    ``additional_properties``.
    """
    # Imported locally (not at module top) to avoid circular imports
    # between generated model modules.
    from ..models.governance_contact import GovernanceContact
    from ..models.governance_file import GovernanceFile

    # Copy so the pops below do not mutate the caller's mapping.
    d = dict(src_dict)
    id = d.pop("id")

    name = d.pop("name")

    description = d.pop("description")

    type_ = GovernanceType(d.pop("type"))

    path = d.pop("path")

    supplemental_path = d.pop("supplementalPath")

    scope = GovernanceScope(d.pop("scope"))

    contacts = []
    _contacts = d.pop("contacts")
    for contacts_item_data in _contacts:
        contacts_item = GovernanceContact.from_dict(contacts_item_data)

        contacts.append(contacts_item)

    is_enacted = d.pop("isEnacted")

    is_project_configured = d.pop("isProjectConfigured")

    is_fulfilled = d.pop("isFulfilled")

    def _parse_acceptance(data: object) -> GovernanceScope | None | Unset:
        # None/UNSET pass through; a string is tried as a GovernanceScope,
        # with the raw value returned if conversion fails.
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            acceptance_type_1 = GovernanceScope(data)

            return acceptance_type_1
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(GovernanceScope | None | Unset, data)

    acceptance = _parse_acceptance(d.pop("acceptance", UNSET))

    def _parse_enactment_date(data: object) -> datetime.datetime | None | Unset:
        # ISO-8601 strings are parsed to datetimes; other values pass through.
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            enactment_date_type_0 = isoparse(data)

            return enactment_date_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(datetime.datetime | None | Unset, data)

    enactment_date = _parse_enactment_date(d.pop("enactmentDate", UNSET))

    _expiration_type = d.pop("expirationType", UNSET)
    expiration_type: GovernanceExpiryType | Unset
    if isinstance(_expiration_type, Unset):
        expiration_type = UNSET
    else:
        expiration_type = GovernanceExpiryType(_expiration_type)

    def _parse_expiration_days_after_completion(data: object) -> int | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(int | None | Unset, data)

    expiration_days_after_completion = _parse_expiration_days_after_completion(
        d.pop("expirationDaysAfterCompletion", UNSET)
    )

    def _parse_expiration_date(data: object) -> datetime.datetime | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            expiration_date_type_0 = isoparse(data)

            return expiration_date_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(datetime.datetime | None | Unset, data)

    expiration_date = _parse_expiration_date(d.pop("expirationDate", UNSET))

    def _parse_supplemental_docs(data: object) -> list[GovernanceFile] | None | Unset:
        # A list is parsed item-by-item into GovernanceFile models.
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, list):
                raise TypeError()
            supplemental_docs_type_0 = []
            _supplemental_docs_type_0 = data
            for supplemental_docs_type_0_item_data in _supplemental_docs_type_0:
                supplemental_docs_type_0_item = GovernanceFile.from_dict(supplemental_docs_type_0_item_data)

                supplemental_docs_type_0.append(supplemental_docs_type_0_item)

            return supplemental_docs_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(list[GovernanceFile] | None | Unset, data)

    supplemental_docs = _parse_supplemental_docs(d.pop("supplementalDocs", UNSET))

    def _parse_file(data: object) -> GovernanceFile | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, dict):
                raise TypeError()
            file_type_1 = GovernanceFile.from_dict(data)

            return file_type_1
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(GovernanceFile | None | Unset, data)

    file = _parse_file(d.pop("file", UNSET))

    def _parse_authorship(data: object) -> GovernanceScope | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            authorship_type_1 = GovernanceScope(data)

            return authorship_type_1
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(GovernanceScope | None | Unset, data)

    authorship = _parse_authorship(d.pop("authorship", UNSET))

    def _parse_verification_method(data: object) -> GovernanceTrainingVerification | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            verification_method_type_1 = GovernanceTrainingVerification(data)

            return verification_method_type_1
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(GovernanceTrainingVerification | None | Unset, data)

    verification_method = _parse_verification_method(d.pop("verificationMethod", UNSET))

    def _parse_fulfillment_id(data: object) -> None | str | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(None | str | Unset, data)

    fulfillment_id = _parse_fulfillment_id(d.pop("fulfillmentId", UNSET))

    def _parse_fulfillment_date(data: object) -> datetime.datetime | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            fulfillment_date_type_0 = isoparse(data)

            return fulfillment_date_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(datetime.datetime | None | Unset, data)

    fulfillment_date = _parse_fulfillment_date(d.pop("fulfillmentDate", UNSET))

    def _parse_fulfillment_file(data: object) -> None | str | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(None | str | Unset, data)

    fulfillment_file = _parse_fulfillment_file(d.pop("fulfillmentFile", UNSET))

    def _parse_fulfillment_path(data: object) -> None | str | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(None | str | Unset, data)

    fulfillment_path = _parse_fulfillment_path(d.pop("fulfillmentPath", UNSET))

    requires_user_fulfillment = d.pop("requiresUserFulfillment", UNSET)

    project_requirement = cls(
        id=id,
        name=name,
        description=description,
        type_=type_,
        path=path,
        supplemental_path=supplemental_path,
        scope=scope,
        contacts=contacts,
        is_enacted=is_enacted,
        is_project_configured=is_project_configured,
        is_fulfilled=is_fulfilled,
        acceptance=acceptance,
        enactment_date=enactment_date,
        expiration_type=expiration_type,
        expiration_days_after_completion=expiration_days_after_completion,
        expiration_date=expiration_date,
        supplemental_docs=supplemental_docs,
        file=file,
        authorship=authorship,
        verification_method=verification_method,
        fulfillment_id=fulfillment_id,
        fulfillment_date=fulfillment_date,
        fulfillment_file=fulfillment_file,
        fulfillment_path=fulfillment_path,
        requires_user_fulfillment=requires_user_fulfillment,
    )

    # Whatever keys remain were not declared on the model; keep them.
    project_requirement.additional_properties = d
    return project_requirement
class ProjectRole(str, Enum):
    """Role a user can hold on a project.

    Subclasses ``str`` so members compare equal to, and serialize as,
    their raw string values.
    """

    ADMIN = "ADMIN"
    COLLABORATOR = "COLLABORATOR"
    CONTRIBUTOR = "CONTRIBUTOR"
    NONE = "NONE"
    OWNER = "OWNER"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Fix: return the UNKNOWN member directly instead of the original
        # `cls(cls.UNKNOWN)`, which re-invoked the enum constructor for a
        # redundant second value lookup. Also renames the misleading
        # parameter `number` (values here are strings). Unrecognized
        # server-side values map to UNKNOWN rather than raising ValueError,
        # so new roles don't break older clients.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
16@_attrs_define 17class ProjectSettings: 18 """ 19 Attributes: 20 budget_amount (int): Total allowed cost for the budget period 21 budget_period (BudgetPeriod): Time period associated with the budget amount 22 enable_backup (bool | Unset): Enables the AWS Backup service for S3 Default: False. 23 enable_sftp (bool | Unset): Enables access to files over SFTP Default: False. 24 service_connections (list[str] | Unset): List of service connections to enable 25 kms_arn (None | str | Unset): KMS Key ARN to encrypt S3 objects, if not provided, default bucket encryption will 26 be used 27 retention_policy_days (int | Unset): Days to keep deleted datasets before being permanently erased Default: 7. 28 temporary_storage_lifetime_days (int | Unset): Days to keep temporary storage space (workflow executor cache) 29 Default: 14. 30 vpc_id (None | str | Unset): VPC that the compute environment will use Example: vpc-00000000000000000. 31 batch_subnets (list[str] | None | Unset): List of subnets that the pipeline compute environment will use 32 Example: ['subnet-00000000000000000']. 33 sagemaker_subnets (list[str] | None | Unset): List of subnets that the sagemaker instances will use Example: 34 ['subnet-00000000000000000']. 35 workspace_subnets (list[str] | None | Unset): List of subnets that workspace instances will use Example: 36 ['subnet-00000000000000000']. 37 max_spot_vcpu (int | Unset): vCPU service quota limit for standard spot instances (pipelines) Default: 0. 38 max_fpgavcpu (int | Unset): vCPU service quota limit for FPGA-enabled instances (pipelines) Default: 0. 39 max_gpuvcpu (int | Unset): vCPU service quota limit for GPU-enabled spot instances (pipelines) Default: 0. 40 enable_dragen (bool | Unset): Enables the DRAGEN compute environment (pipelines) Default: False. 
        dragen_ami (None | str | Unset): AMI ID for the DRAGEN compute environment, if enabled (pipelines)
        max_workspaces_vcpu (int | Unset): vCPU service quota limit for standard instances (workspaces) Default: 0.
        max_workspaces_gpuvcpu (int | Unset): vCPU service quota limit for GPU-enabled instances (workspaces) Default:
            0.
        max_workspaces_per_user (int | Unset): Maximum number of workspaces per user (workspaces) Default: 0.
        is_discoverable (bool | None | Unset): Enables the project to be discoverable by other users Default: False.
        is_shareable (bool | None | Unset): Enables the project to be shared with other projects Default: False.
        has_pipelines_enabled (bool | None | Unset): (Read-only) Whether this project has pipelines enabled Default:
            False.
        has_workspaces_enabled (bool | None | Unset): (Read-only) Whether this project has workspaces enabled Default:
            False.
    """

    # Declared model fields; the attrs decorator generates __init__ from these
    # (see the "Method generated by attrs" rendering for this class).
    # UNSET marks "not provided" so it can be distinguished from an explicit None.
    budget_amount: int
    budget_period: BudgetPeriod
    enable_backup: bool | Unset = False
    enable_sftp: bool | Unset = False
    service_connections: list[str] | Unset = UNSET
    kms_arn: None | str | Unset = UNSET
    retention_policy_days: int | Unset = 7
    temporary_storage_lifetime_days: int | Unset = 14
    vpc_id: None | str | Unset = UNSET
    batch_subnets: list[str] | None | Unset = UNSET
    sagemaker_subnets: list[str] | None | Unset = UNSET
    workspace_subnets: list[str] | None | Unset = UNSET
    max_spot_vcpu: int | Unset = 0
    max_fpgavcpu: int | Unset = 0
    max_gpuvcpu: int | Unset = 0
    enable_dragen: bool | Unset = False
    dragen_ami: None | str | Unset = UNSET
    max_workspaces_vcpu: int | Unset = 0
    max_workspaces_gpuvcpu: int | Unset = 0
    max_workspaces_per_user: int | Unset = 0
    is_discoverable: bool | None | Unset = False
    is_shareable: bool | None | Unset = False
    has_pipelines_enabled: bool | None | Unset = False
    has_workspaces_enabled: bool | None | Unset = False
    # Catch-all for payload keys not declared above; excluded from __init__.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize this model to a JSON-style dict with camelCase keys.

        ``budgetAmount`` and ``budgetPeriod`` are always emitted; every other
        field is emitted only when it is not UNSET.  ``additional_properties``
        are written first, so declared fields overwrite colliding extra keys.
        """
        budget_amount = self.budget_amount

        budget_period = self.budget_period.value

        enable_backup = self.enable_backup

        enable_sftp = self.enable_sftp

        service_connections: list[str] | Unset = UNSET
        if not isinstance(self.service_connections, Unset):
            service_connections = self.service_connections

        kms_arn: None | str | Unset
        if isinstance(self.kms_arn, Unset):
            kms_arn = UNSET
        else:
            kms_arn = self.kms_arn

        retention_policy_days = self.retention_policy_days

        temporary_storage_lifetime_days = self.temporary_storage_lifetime_days

        vpc_id: None | str | Unset
        if isinstance(self.vpc_id, Unset):
            vpc_id = UNSET
        else:
            vpc_id = self.vpc_id

        # Generated union handling for list[str] | None | Unset: note the list
        # branch and the fallback branch pass the value through unchanged.
        batch_subnets: list[str] | None | Unset
        if isinstance(self.batch_subnets, Unset):
            batch_subnets = UNSET
        elif isinstance(self.batch_subnets, list):
            batch_subnets = self.batch_subnets

        else:
            batch_subnets = self.batch_subnets

        sagemaker_subnets: list[str] | None | Unset
        if isinstance(self.sagemaker_subnets, Unset):
            sagemaker_subnets = UNSET
        elif isinstance(self.sagemaker_subnets, list):
            sagemaker_subnets = self.sagemaker_subnets

        else:
            sagemaker_subnets = self.sagemaker_subnets

        workspace_subnets: list[str] | None | Unset
        if isinstance(self.workspace_subnets, Unset):
            workspace_subnets = UNSET
        elif isinstance(self.workspace_subnets, list):
            workspace_subnets = self.workspace_subnets

        else:
            workspace_subnets = self.workspace_subnets

        max_spot_vcpu = self.max_spot_vcpu

        max_fpgavcpu = self.max_fpgavcpu

        max_gpuvcpu = self.max_gpuvcpu

        enable_dragen = self.enable_dragen

        dragen_ami: None | str | Unset
        if isinstance(self.dragen_ami, Unset):
            dragen_ami = UNSET
        else:
            dragen_ami = self.dragen_ami

        max_workspaces_vcpu = self.max_workspaces_vcpu

        max_workspaces_gpuvcpu = self.max_workspaces_gpuvcpu

        max_workspaces_per_user = self.max_workspaces_per_user

        is_discoverable: bool | None | Unset
        if isinstance(self.is_discoverable, Unset):
            is_discoverable = UNSET
        else:
            is_discoverable = self.is_discoverable

        is_shareable: bool | None | Unset
        if isinstance(self.is_shareable, Unset):
            is_shareable = UNSET
        else:
            is_shareable = self.is_shareable

        has_pipelines_enabled: bool | None | Unset
        if isinstance(self.has_pipelines_enabled, Unset):
            has_pipelines_enabled = UNSET
        else:
            has_pipelines_enabled = self.has_pipelines_enabled

        has_workspaces_enabled: bool | None | Unset
        if isinstance(self.has_workspaces_enabled, Unset):
            has_workspaces_enabled = UNSET
        else:
            has_workspaces_enabled = self.has_workspaces_enabled

        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "budgetAmount": budget_amount,
                "budgetPeriod": budget_period,
            }
        )
        # Optional fields are only included when they carry a real value.
        if enable_backup is not UNSET:
            field_dict["enableBackup"] = enable_backup
        if enable_sftp is not UNSET:
            field_dict["enableSftp"] = enable_sftp
        if service_connections is not UNSET:
            field_dict["serviceConnections"] = service_connections
        if kms_arn is not UNSET:
            field_dict["kmsArn"] = kms_arn
        if retention_policy_days is not UNSET:
            field_dict["retentionPolicyDays"] = retention_policy_days
        if temporary_storage_lifetime_days is not UNSET:
            field_dict["temporaryStorageLifetimeDays"] = temporary_storage_lifetime_days
        if vpc_id is not UNSET:
            field_dict["vpcId"] = vpc_id
        if batch_subnets is not UNSET:
            field_dict["batchSubnets"] = batch_subnets
        if sagemaker_subnets is not UNSET:
            field_dict["sagemakerSubnets"] = sagemaker_subnets
        if workspace_subnets is not UNSET:
            field_dict["workspaceSubnets"] = workspace_subnets
        if max_spot_vcpu is not UNSET:
            field_dict["maxSpotVCPU"] = max_spot_vcpu
        if max_fpgavcpu is not UNSET:
            field_dict["maxFPGAVCPU"] = max_fpgavcpu
        if max_gpuvcpu is not UNSET:
            field_dict["maxGPUVCPU"] = max_gpuvcpu
        if enable_dragen is not UNSET:
            field_dict["enableDragen"] = enable_dragen
        if dragen_ami is not UNSET:
            field_dict["dragenAmi"] = dragen_ami
        if max_workspaces_vcpu is not UNSET:
            field_dict["maxWorkspacesVCPU"] = max_workspaces_vcpu
        if max_workspaces_gpuvcpu is not UNSET:
            field_dict["maxWorkspacesGPUVCPU"] = max_workspaces_gpuvcpu
        if max_workspaces_per_user is not UNSET:
            field_dict["maxWorkspacesPerUser"] = max_workspaces_per_user
        if is_discoverable is not UNSET:
            field_dict["isDiscoverable"] = is_discoverable
        if is_shareable is not UNSET:
            field_dict["isShareable"] = is_shareable
        if has_pipelines_enabled is not UNSET:
            field_dict["hasPipelinesEnabled"] = has_pipelines_enabled
        if has_workspaces_enabled is not UNSET:
            field_dict["hasWorkspacesEnabled"] = has_workspaces_enabled

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a ProjectSettings from a JSON-style dict.

        ``budgetAmount``/``budgetPeriod`` are required (KeyError if absent);
        all other keys default to UNSET.  Keys left over after parsing are
        stored in ``additional_properties``.
        """
        d = dict(src_dict)
        budget_amount = d.pop("budgetAmount")

        budget_period = BudgetPeriod(d.pop("budgetPeriod"))

        enable_backup = d.pop("enableBackup", UNSET)

        enable_sftp = d.pop("enableSftp", UNSET)

        service_connections = cast(list[str], d.pop("serviceConnections", UNSET))

        # The _parse_* helpers pass None/UNSET through unchanged; any other
        # value is cast without runtime validation.
        def _parse_kms_arn(data: object) -> None | str | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        kms_arn = _parse_kms_arn(d.pop("kmsArn", UNSET))

        retention_policy_days = d.pop("retentionPolicyDays", UNSET)

        temporary_storage_lifetime_days = d.pop("temporaryStorageLifetimeDays", UNSET)

        def _parse_vpc_id(data: object) -> None | str | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        vpc_id = _parse_vpc_id(d.pop("vpcId", UNSET))

        def _parse_batch_subnets(data: object) -> list[str] | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                batch_subnets_type_0 = cast(list[str], data)

                return batch_subnets_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(list[str] | None | Unset, data)

        batch_subnets = _parse_batch_subnets(d.pop("batchSubnets", UNSET))

        def _parse_sagemaker_subnets(data: object) -> list[str] | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                sagemaker_subnets_type_0 = cast(list[str], data)

                return sagemaker_subnets_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(list[str] | None | Unset, data)

        sagemaker_subnets = _parse_sagemaker_subnets(d.pop("sagemakerSubnets", UNSET))

        def _parse_workspace_subnets(data: object) -> list[str] | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                workspace_subnets_type_0 = cast(list[str], data)

                return workspace_subnets_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(list[str] | None | Unset, data)

        workspace_subnets = _parse_workspace_subnets(d.pop("workspaceSubnets", UNSET))

        max_spot_vcpu = d.pop("maxSpotVCPU", UNSET)

        max_fpgavcpu = d.pop("maxFPGAVCPU", UNSET)

        max_gpuvcpu = d.pop("maxGPUVCPU", UNSET)

        enable_dragen = d.pop("enableDragen", UNSET)

        def _parse_dragen_ami(data: object) -> None | str | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        dragen_ami = _parse_dragen_ami(d.pop("dragenAmi", UNSET))

        max_workspaces_vcpu = d.pop("maxWorkspacesVCPU", UNSET)

        max_workspaces_gpuvcpu = d.pop("maxWorkspacesGPUVCPU", UNSET)

        max_workspaces_per_user = d.pop("maxWorkspacesPerUser", UNSET)

        def _parse_is_discoverable(data: object) -> bool | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(bool | None | Unset, data)

        is_discoverable = _parse_is_discoverable(d.pop("isDiscoverable", UNSET))

        def _parse_is_shareable(data: object) -> bool | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(bool | None | Unset, data)

        is_shareable = _parse_is_shareable(d.pop("isShareable", UNSET))

        def _parse_has_pipelines_enabled(data: object) -> bool | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(bool | None | Unset, data)

        has_pipelines_enabled = _parse_has_pipelines_enabled(d.pop("hasPipelinesEnabled", UNSET))

        def _parse_has_workspaces_enabled(data: object) -> bool | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(bool | None | Unset, data)

        has_workspaces_enabled = _parse_has_workspaces_enabled(d.pop("hasWorkspacesEnabled", UNSET))

        project_settings = cls(
            budget_amount=budget_amount,
            budget_period=budget_period,
            enable_backup=enable_backup,
            enable_sftp=enable_sftp,
            service_connections=service_connections,
            kms_arn=kms_arn,
            retention_policy_days=retention_policy_days,
            temporary_storage_lifetime_days=temporary_storage_lifetime_days,
            vpc_id=vpc_id,
            batch_subnets=batch_subnets,
            sagemaker_subnets=sagemaker_subnets,
            workspace_subnets=workspace_subnets,
            max_spot_vcpu=max_spot_vcpu,
            max_fpgavcpu=max_fpgavcpu,
            max_gpuvcpu=max_gpuvcpu,
            enable_dragen=enable_dragen,
            dragen_ami=dragen_ami,
            max_workspaces_vcpu=max_workspaces_vcpu,
            max_workspaces_gpuvcpu=max_workspaces_gpuvcpu,
            max_workspaces_per_user=max_workspaces_per_user,
            is_discoverable=is_discoverable,
            is_shareable=is_shareable,
            has_pipelines_enabled=has_pipelines_enabled,
            has_workspaces_enabled=has_workspaces_enabled,
        )

        # Whatever keys remain were not declared fields; keep them accessible.
        project_settings.additional_properties = d
        return project_settings

    @property
    def additional_keys(self) -> list[str]:
        """Keys present in the payload beyond the declared attributes."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- budget_amount (int): Total allowed cost for the budget period
- budget_period (BudgetPeriod): Time period associated with the budget amount
- enable_backup (bool | Unset): Enables the AWS Backup service for S3 Default: False.
- enable_sftp (bool | Unset): Enables access to files over SFTP Default: False.
- service_connections (list[str] | Unset): List of service connections to enable
- kms_arn (None | str | Unset): KMS Key ARN used to encrypt S3 objects; if not provided, default bucket encryption is used
- retention_policy_days (int | Unset): Days to keep deleted datasets before being permanently erased Default: 7.
- temporary_storage_lifetime_days (int | Unset): Days to keep temporary storage space (workflow executor cache) Default: 14.
- vpc_id (None | str | Unset): VPC that the compute environment will use Example: vpc-00000000000000000.
- batch_subnets (list[str] | None | Unset): List of subnets that the pipeline compute environment will use Example: ['subnet-00000000000000000'].
- sagemaker_subnets (list[str] | None | Unset): List of subnets that the sagemaker instances will use Example: ['subnet-00000000000000000'].
- workspace_subnets (list[str] | None | Unset): List of subnets that workspace instances will use Example: ['subnet-00000000000000000'].
- max_spot_vcpu (int | Unset): vCPU service quota limit for standard spot instances (pipelines) Default: 0.
- max_fpgavcpu (int | Unset): vCPU service quota limit for FPGA-enabled instances (pipelines) Default: 0.
- max_gpuvcpu (int | Unset): vCPU service quota limit for GPU-enabled spot instances (pipelines) Default: 0.
- enable_dragen (bool | Unset): Enables the DRAGEN compute environment (pipelines) Default: False.
- dragen_ami (None | str | Unset): AMI ID for the DRAGEN compute environment, if enabled (pipelines)
- max_workspaces_vcpu (int | Unset): vCPU service quota limit for standard instances (workspaces) Default: 0.
- max_workspaces_gpuvcpu (int | Unset): vCPU service quota limit for GPU-enabled instances (workspaces) Default: 0.
- max_workspaces_per_user (int | Unset): Maximum number of workspaces per user (workspaces) Default: 0.
- is_discoverable (bool | None | Unset): Enables the project to be discoverable by other users Default: False.
- is_shareable (bool | None | Unset): Enables the project to be shared with other projects Default: False.
- has_pipelines_enabled (bool | None | Unset): (Read-only) Whether this project has pipelines enabled Default: False.
- has_workspaces_enabled (bool | None | Unset): (Read-only) Whether this project has workspaces enabled Default: False.
47def __init__(self, budget_amount, budget_period, enable_backup=attr_dict['enable_backup'].default, enable_sftp=attr_dict['enable_sftp'].default, service_connections=attr_dict['service_connections'].default, kms_arn=attr_dict['kms_arn'].default, retention_policy_days=attr_dict['retention_policy_days'].default, temporary_storage_lifetime_days=attr_dict['temporary_storage_lifetime_days'].default, vpc_id=attr_dict['vpc_id'].default, batch_subnets=attr_dict['batch_subnets'].default, sagemaker_subnets=attr_dict['sagemaker_subnets'].default, workspace_subnets=attr_dict['workspace_subnets'].default, max_spot_vcpu=attr_dict['max_spot_vcpu'].default, max_fpgavcpu=attr_dict['max_fpgavcpu'].default, max_gpuvcpu=attr_dict['max_gpuvcpu'].default, enable_dragen=attr_dict['enable_dragen'].default, dragen_ami=attr_dict['dragen_ami'].default, max_workspaces_vcpu=attr_dict['max_workspaces_vcpu'].default, max_workspaces_gpuvcpu=attr_dict['max_workspaces_gpuvcpu'].default, max_workspaces_per_user=attr_dict['max_workspaces_per_user'].default, is_discoverable=attr_dict['is_discoverable'].default, is_shareable=attr_dict['is_shareable'].default, has_pipelines_enabled=attr_dict['has_pipelines_enabled'].default, has_workspaces_enabled=attr_dict['has_workspaces_enabled'].default): 48 self.budget_amount = budget_amount 49 self.budget_period = budget_period 50 self.enable_backup = enable_backup 51 self.enable_sftp = enable_sftp 52 self.service_connections = service_connections 53 self.kms_arn = kms_arn 54 self.retention_policy_days = retention_policy_days 55 self.temporary_storage_lifetime_days = temporary_storage_lifetime_days 56 self.vpc_id = vpc_id 57 self.batch_subnets = batch_subnets 58 self.sagemaker_subnets = sagemaker_subnets 59 self.workspace_subnets = workspace_subnets 60 self.max_spot_vcpu = max_spot_vcpu 61 self.max_fpgavcpu = max_fpgavcpu 62 self.max_gpuvcpu = max_gpuvcpu 63 self.enable_dragen = enable_dragen 64 self.dragen_ami = dragen_ami 65 self.max_workspaces_vcpu = 
max_workspaces_vcpu 66 self.max_workspaces_gpuvcpu = max_workspaces_gpuvcpu 67 self.max_workspaces_per_user = max_workspaces_per_user 68 self.is_discoverable = is_discoverable 69 self.is_shareable = is_shareable 70 self.has_pipelines_enabled = has_pipelines_enabled 71 self.has_workspaces_enabled = has_workspaces_enabled 72 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectSettings.
80 def to_dict(self) -> dict[str, Any]: 81 budget_amount = self.budget_amount 82 83 budget_period = self.budget_period.value 84 85 enable_backup = self.enable_backup 86 87 enable_sftp = self.enable_sftp 88 89 service_connections: list[str] | Unset = UNSET 90 if not isinstance(self.service_connections, Unset): 91 service_connections = self.service_connections 92 93 kms_arn: None | str | Unset 94 if isinstance(self.kms_arn, Unset): 95 kms_arn = UNSET 96 else: 97 kms_arn = self.kms_arn 98 99 retention_policy_days = self.retention_policy_days 100 101 temporary_storage_lifetime_days = self.temporary_storage_lifetime_days 102 103 vpc_id: None | str | Unset 104 if isinstance(self.vpc_id, Unset): 105 vpc_id = UNSET 106 else: 107 vpc_id = self.vpc_id 108 109 batch_subnets: list[str] | None | Unset 110 if isinstance(self.batch_subnets, Unset): 111 batch_subnets = UNSET 112 elif isinstance(self.batch_subnets, list): 113 batch_subnets = self.batch_subnets 114 115 else: 116 batch_subnets = self.batch_subnets 117 118 sagemaker_subnets: list[str] | None | Unset 119 if isinstance(self.sagemaker_subnets, Unset): 120 sagemaker_subnets = UNSET 121 elif isinstance(self.sagemaker_subnets, list): 122 sagemaker_subnets = self.sagemaker_subnets 123 124 else: 125 sagemaker_subnets = self.sagemaker_subnets 126 127 workspace_subnets: list[str] | None | Unset 128 if isinstance(self.workspace_subnets, Unset): 129 workspace_subnets = UNSET 130 elif isinstance(self.workspace_subnets, list): 131 workspace_subnets = self.workspace_subnets 132 133 else: 134 workspace_subnets = self.workspace_subnets 135 136 max_spot_vcpu = self.max_spot_vcpu 137 138 max_fpgavcpu = self.max_fpgavcpu 139 140 max_gpuvcpu = self.max_gpuvcpu 141 142 enable_dragen = self.enable_dragen 143 144 dragen_ami: None | str | Unset 145 if isinstance(self.dragen_ami, Unset): 146 dragen_ami = UNSET 147 else: 148 dragen_ami = self.dragen_ami 149 150 max_workspaces_vcpu = self.max_workspaces_vcpu 151 152 max_workspaces_gpuvcpu = 
self.max_workspaces_gpuvcpu 153 154 max_workspaces_per_user = self.max_workspaces_per_user 155 156 is_discoverable: bool | None | Unset 157 if isinstance(self.is_discoverable, Unset): 158 is_discoverable = UNSET 159 else: 160 is_discoverable = self.is_discoverable 161 162 is_shareable: bool | None | Unset 163 if isinstance(self.is_shareable, Unset): 164 is_shareable = UNSET 165 else: 166 is_shareable = self.is_shareable 167 168 has_pipelines_enabled: bool | None | Unset 169 if isinstance(self.has_pipelines_enabled, Unset): 170 has_pipelines_enabled = UNSET 171 else: 172 has_pipelines_enabled = self.has_pipelines_enabled 173 174 has_workspaces_enabled: bool | None | Unset 175 if isinstance(self.has_workspaces_enabled, Unset): 176 has_workspaces_enabled = UNSET 177 else: 178 has_workspaces_enabled = self.has_workspaces_enabled 179 180 field_dict: dict[str, Any] = {} 181 field_dict.update(self.additional_properties) 182 field_dict.update( 183 { 184 "budgetAmount": budget_amount, 185 "budgetPeriod": budget_period, 186 } 187 ) 188 if enable_backup is not UNSET: 189 field_dict["enableBackup"] = enable_backup 190 if enable_sftp is not UNSET: 191 field_dict["enableSftp"] = enable_sftp 192 if service_connections is not UNSET: 193 field_dict["serviceConnections"] = service_connections 194 if kms_arn is not UNSET: 195 field_dict["kmsArn"] = kms_arn 196 if retention_policy_days is not UNSET: 197 field_dict["retentionPolicyDays"] = retention_policy_days 198 if temporary_storage_lifetime_days is not UNSET: 199 field_dict["temporaryStorageLifetimeDays"] = temporary_storage_lifetime_days 200 if vpc_id is not UNSET: 201 field_dict["vpcId"] = vpc_id 202 if batch_subnets is not UNSET: 203 field_dict["batchSubnets"] = batch_subnets 204 if sagemaker_subnets is not UNSET: 205 field_dict["sagemakerSubnets"] = sagemaker_subnets 206 if workspace_subnets is not UNSET: 207 field_dict["workspaceSubnets"] = workspace_subnets 208 if max_spot_vcpu is not UNSET: 209 field_dict["maxSpotVCPU"] = 
max_spot_vcpu 210 if max_fpgavcpu is not UNSET: 211 field_dict["maxFPGAVCPU"] = max_fpgavcpu 212 if max_gpuvcpu is not UNSET: 213 field_dict["maxGPUVCPU"] = max_gpuvcpu 214 if enable_dragen is not UNSET: 215 field_dict["enableDragen"] = enable_dragen 216 if dragen_ami is not UNSET: 217 field_dict["dragenAmi"] = dragen_ami 218 if max_workspaces_vcpu is not UNSET: 219 field_dict["maxWorkspacesVCPU"] = max_workspaces_vcpu 220 if max_workspaces_gpuvcpu is not UNSET: 221 field_dict["maxWorkspacesGPUVCPU"] = max_workspaces_gpuvcpu 222 if max_workspaces_per_user is not UNSET: 223 field_dict["maxWorkspacesPerUser"] = max_workspaces_per_user 224 if is_discoverable is not UNSET: 225 field_dict["isDiscoverable"] = is_discoverable 226 if is_shareable is not UNSET: 227 field_dict["isShareable"] = is_shareable 228 if has_pipelines_enabled is not UNSET: 229 field_dict["hasPipelinesEnabled"] = has_pipelines_enabled 230 if has_workspaces_enabled is not UNSET: 231 field_dict["hasWorkspacesEnabled"] = has_workspaces_enabled 232 233 return field_dict
235 @classmethod 236 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 237 d = dict(src_dict) 238 budget_amount = d.pop("budgetAmount") 239 240 budget_period = BudgetPeriod(d.pop("budgetPeriod")) 241 242 enable_backup = d.pop("enableBackup", UNSET) 243 244 enable_sftp = d.pop("enableSftp", UNSET) 245 246 service_connections = cast(list[str], d.pop("serviceConnections", UNSET)) 247 248 def _parse_kms_arn(data: object) -> None | str | Unset: 249 if data is None: 250 return data 251 if isinstance(data, Unset): 252 return data 253 return cast(None | str | Unset, data) 254 255 kms_arn = _parse_kms_arn(d.pop("kmsArn", UNSET)) 256 257 retention_policy_days = d.pop("retentionPolicyDays", UNSET) 258 259 temporary_storage_lifetime_days = d.pop("temporaryStorageLifetimeDays", UNSET) 260 261 def _parse_vpc_id(data: object) -> None | str | Unset: 262 if data is None: 263 return data 264 if isinstance(data, Unset): 265 return data 266 return cast(None | str | Unset, data) 267 268 vpc_id = _parse_vpc_id(d.pop("vpcId", UNSET)) 269 270 def _parse_batch_subnets(data: object) -> list[str] | None | Unset: 271 if data is None: 272 return data 273 if isinstance(data, Unset): 274 return data 275 try: 276 if not isinstance(data, list): 277 raise TypeError() 278 batch_subnets_type_0 = cast(list[str], data) 279 280 return batch_subnets_type_0 281 except (TypeError, ValueError, AttributeError, KeyError): 282 pass 283 return cast(list[str] | None | Unset, data) 284 285 batch_subnets = _parse_batch_subnets(d.pop("batchSubnets", UNSET)) 286 287 def _parse_sagemaker_subnets(data: object) -> list[str] | None | Unset: 288 if data is None: 289 return data 290 if isinstance(data, Unset): 291 return data 292 try: 293 if not isinstance(data, list): 294 raise TypeError() 295 sagemaker_subnets_type_0 = cast(list[str], data) 296 297 return sagemaker_subnets_type_0 298 except (TypeError, ValueError, AttributeError, KeyError): 299 pass 300 return cast(list[str] | None | Unset, data) 301 302 
sagemaker_subnets = _parse_sagemaker_subnets(d.pop("sagemakerSubnets", UNSET)) 303 304 def _parse_workspace_subnets(data: object) -> list[str] | None | Unset: 305 if data is None: 306 return data 307 if isinstance(data, Unset): 308 return data 309 try: 310 if not isinstance(data, list): 311 raise TypeError() 312 workspace_subnets_type_0 = cast(list[str], data) 313 314 return workspace_subnets_type_0 315 except (TypeError, ValueError, AttributeError, KeyError): 316 pass 317 return cast(list[str] | None | Unset, data) 318 319 workspace_subnets = _parse_workspace_subnets(d.pop("workspaceSubnets", UNSET)) 320 321 max_spot_vcpu = d.pop("maxSpotVCPU", UNSET) 322 323 max_fpgavcpu = d.pop("maxFPGAVCPU", UNSET) 324 325 max_gpuvcpu = d.pop("maxGPUVCPU", UNSET) 326 327 enable_dragen = d.pop("enableDragen", UNSET) 328 329 def _parse_dragen_ami(data: object) -> None | str | Unset: 330 if data is None: 331 return data 332 if isinstance(data, Unset): 333 return data 334 return cast(None | str | Unset, data) 335 336 dragen_ami = _parse_dragen_ami(d.pop("dragenAmi", UNSET)) 337 338 max_workspaces_vcpu = d.pop("maxWorkspacesVCPU", UNSET) 339 340 max_workspaces_gpuvcpu = d.pop("maxWorkspacesGPUVCPU", UNSET) 341 342 max_workspaces_per_user = d.pop("maxWorkspacesPerUser", UNSET) 343 344 def _parse_is_discoverable(data: object) -> bool | None | Unset: 345 if data is None: 346 return data 347 if isinstance(data, Unset): 348 return data 349 return cast(bool | None | Unset, data) 350 351 is_discoverable = _parse_is_discoverable(d.pop("isDiscoverable", UNSET)) 352 353 def _parse_is_shareable(data: object) -> bool | None | Unset: 354 if data is None: 355 return data 356 if isinstance(data, Unset): 357 return data 358 return cast(bool | None | Unset, data) 359 360 is_shareable = _parse_is_shareable(d.pop("isShareable", UNSET)) 361 362 def _parse_has_pipelines_enabled(data: object) -> bool | None | Unset: 363 if data is None: 364 return data 365 if isinstance(data, Unset): 366 return data 367 
return cast(bool | None | Unset, data) 368 369 has_pipelines_enabled = _parse_has_pipelines_enabled(d.pop("hasPipelinesEnabled", UNSET)) 370 371 def _parse_has_workspaces_enabled(data: object) -> bool | None | Unset: 372 if data is None: 373 return data 374 if isinstance(data, Unset): 375 return data 376 return cast(bool | None | Unset, data) 377 378 has_workspaces_enabled = _parse_has_workspaces_enabled(d.pop("hasWorkspacesEnabled", UNSET)) 379 380 project_settings = cls( 381 budget_amount=budget_amount, 382 budget_period=budget_period, 383 enable_backup=enable_backup, 384 enable_sftp=enable_sftp, 385 service_connections=service_connections, 386 kms_arn=kms_arn, 387 retention_policy_days=retention_policy_days, 388 temporary_storage_lifetime_days=temporary_storage_lifetime_days, 389 vpc_id=vpc_id, 390 batch_subnets=batch_subnets, 391 sagemaker_subnets=sagemaker_subnets, 392 workspace_subnets=workspace_subnets, 393 max_spot_vcpu=max_spot_vcpu, 394 max_fpgavcpu=max_fpgavcpu, 395 max_gpuvcpu=max_gpuvcpu, 396 enable_dragen=enable_dragen, 397 dragen_ami=dragen_ami, 398 max_workspaces_vcpu=max_workspaces_vcpu, 399 max_workspaces_gpuvcpu=max_workspaces_gpuvcpu, 400 max_workspaces_per_user=max_workspaces_per_user, 401 is_discoverable=is_discoverable, 402 is_shareable=is_shareable, 403 has_pipelines_enabled=has_pipelines_enabled, 404 has_workspaces_enabled=has_workspaces_enabled, 405 ) 406 407 project_settings.additional_properties = d 408 return project_settings
@_attrs_define
class ProjectUser:
    """
    Attributes:
        name (str):
        username (str):
        organization (str):
        department (str):
        email (str):
        job_title (str):
        role (ProjectRole):
    """

    name: str
    username: str
    organization: str
    department: str
    email: str
    job_title: str
    role: ProjectRole
    # Holds payload keys not declared above; excluded from __init__.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-compatible dict (camelCase keys).

        Extra keys are written first, so declared fields win on collisions.
        """
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "name": self.name,
                "username": self.username,
                "organization": self.organization,
                "department": self.department,
                "email": self.email,
                "jobTitle": self.job_title,
                "role": self.role.value,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a ProjectUser from a JSON-style dict.

        Keys not declared as attributes are kept in ``additional_properties``.
        """
        payload = dict(src_dict)
        user = cls(
            name=payload.pop("name"),
            username=payload.pop("username"),
            organization=payload.pop("organization"),
            department=payload.pop("department"),
            email=payload.pop("email"),
            job_title=payload.pop("jobTitle"),
            role=ProjectRole(payload.pop("role")),
        )
        user.additional_properties = payload
        return user

    @property
    def additional_keys(self) -> list[str]:
        """Keys present in the payload beyond the declared attributes."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- username (str):
- organization (str):
- department (str):
- email (str):
- job_title (str):
- role (ProjectRole):
30def __init__(self, name, username, organization, department, email, job_title, role): 31 self.name = name 32 self.username = username 33 self.organization = organization 34 self.department = department 35 self.email = email 36 self.job_title = job_title 37 self.role = role 38 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectUser.
37 def to_dict(self) -> dict[str, Any]: 38 name = self.name 39 40 username = self.username 41 42 organization = self.organization 43 44 department = self.department 45 46 email = self.email 47 48 job_title = self.job_title 49 50 role = self.role.value 51 52 field_dict: dict[str, Any] = {} 53 field_dict.update(self.additional_properties) 54 field_dict.update( 55 { 56 "name": name, 57 "username": username, 58 "organization": organization, 59 "department": department, 60 "email": email, 61 "jobTitle": job_title, 62 "role": role, 63 } 64 ) 65 66 return field_dict
68 @classmethod 69 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 70 d = dict(src_dict) 71 name = d.pop("name") 72 73 username = d.pop("username") 74 75 organization = d.pop("organization") 76 77 department = d.pop("department") 78 79 email = d.pop("email") 80 81 job_title = d.pop("jobTitle") 82 83 role = ProjectRole(d.pop("role")) 84 85 project_user = cls( 86 name=name, 87 username=username, 88 organization=organization, 89 department=department, 90 email=email, 91 job_title=job_title, 92 role=role, 93 ) 94 95 project_user.additional_properties = d 96 return project_user
@_attrs_define
class Reference:
    """
    Attributes:
        id (str):
        name (str):
        description (str):
        type_ (str):
        files (list[FileEntry]):
        created_by (str):
        created_at (datetime.datetime):
    """

    id: str
    name: str
    description: str
    type_: str
    files: list[FileEntry]
    created_by: str
    created_at: datetime.datetime
    # Holds payload keys not declared above; excluded from __init__.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-compatible dict (camelCase keys).

        ``files`` entries are serialized recursively and ``created_at`` is
        emitted in ISO-8601 form.  Extra keys are written first, so declared
        fields win on collisions.
        """
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "type": self.type_,
                "files": [entry.to_dict() for entry in self.files],
                "createdBy": self.created_by,
                "createdAt": self.created_at.isoformat(),
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a Reference from a JSON-style dict.

        Keys not declared as attributes are kept in ``additional_properties``.
        """
        from ..models.file_entry import FileEntry

        payload = dict(src_dict)
        reference = cls(
            id=payload.pop("id"),
            name=payload.pop("name"),
            description=payload.pop("description"),
            type_=payload.pop("type"),
            files=[FileEntry.from_dict(item) for item in payload.pop("files")],
            created_by=payload.pop("createdBy"),
            created_at=isoparse(payload.pop("createdAt")),
        )
        reference.additional_properties = payload
        return reference

    @property
    def additional_keys(self) -> list[str]:
        """Keys present in the payload beyond the declared attributes."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- description (str):
- type_ (str):
- files (list[FileEntry]):
- created_by (str):
- created_at (datetime.datetime):
30def __init__(self, id, name, description, type_, files, created_by, created_at): 31 self.id = id 32 self.name = name 33 self.description = description 34 self.type_ = type_ 35 self.files = files 36 self.created_by = created_by 37 self.created_at = created_at 38 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Reference.
41 def to_dict(self) -> dict[str, Any]: 42 id = self.id 43 44 name = self.name 45 46 description = self.description 47 48 type_ = self.type_ 49 50 files = [] 51 for files_item_data in self.files: 52 files_item = files_item_data.to_dict() 53 files.append(files_item) 54 55 created_by = self.created_by 56 57 created_at = self.created_at.isoformat() 58 59 field_dict: dict[str, Any] = {} 60 field_dict.update(self.additional_properties) 61 field_dict.update( 62 { 63 "id": id, 64 "name": name, 65 "description": description, 66 "type": type_, 67 "files": files, 68 "createdBy": created_by, 69 "createdAt": created_at, 70 } 71 ) 72 73 return field_dict
75 @classmethod 76 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 77 from ..models.file_entry import FileEntry 78 79 d = dict(src_dict) 80 id = d.pop("id") 81 82 name = d.pop("name") 83 84 description = d.pop("description") 85 86 type_ = d.pop("type") 87 88 files = [] 89 _files = d.pop("files") 90 for files_item_data in _files: 91 files_item = FileEntry.from_dict(files_item_data) 92 93 files.append(files_item) 94 95 created_by = d.pop("createdBy") 96 97 created_at = isoparse(d.pop("createdAt")) 98 99 reference = cls( 100 id=id, 101 name=name, 102 description=description, 103 type_=type_, 104 files=files, 105 created_by=created_by, 106 created_at=created_at, 107 ) 108 109 reference.additional_properties = d 110 return reference
@_attrs_define
class ReferenceType:
    """A reference-data type definition.

    Attributes:
        name (str):
        description (str):
        directory (str):
        validation (list[ReferenceTypeValidationItem]):
    """

    name: str
    description: str
    directory: str
    validation: list[ReferenceTypeValidationItem]
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; declared fields override extras."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "name": self.name,
                "description": self.description,
                "directory": self.directory,
                "validation": [item.to_dict() for item in self.validation],
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-style dict; leftover keys become extras."""
        from ..models.reference_type_validation_item import ReferenceTypeValidationItem

        payload = dict(src_dict)
        instance = cls(
            name=payload.pop("name"),
            description=payload.pop("description"),
            directory=payload.pop("directory"),
            validation=[
                ReferenceTypeValidationItem.from_dict(entry)
                for entry in payload.pop("validation")
            ],
        )
        # Remaining keys were not declared fields; preserve them verbatim.
        instance.additional_properties = payload
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of the pass-through keys captured during deserialization."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- description (str):
- directory (str):
- validation (list[ReferenceTypeValidationItem]):
def __init__(self, name, description, directory, validation):
    """attrs-generated constructor rendering for ``ReferenceType``.

    Stores each declared attribute as-is and starts the pass-through
    ``additional_properties`` dict empty.
    """
    self.name = name
    self.description = description
    self.directory = directory
    self.validation = validation
    # attrs-internal factory call; produces the empty extras dict.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ReferenceType.
def to_dict(self) -> dict[str, Any]:
    """Serialize this ReferenceType to a JSON-ready dict."""
    name = self.name

    description = self.description

    directory = self.directory

    # Each validation item serializes itself into a plain dict.
    validation = []
    for validation_item_data in self.validation:
        validation_item = validation_item_data.to_dict()
        validation.append(validation_item)

    field_dict: dict[str, Any] = {}
    # Extras first, declared fields second: known keys win on collision.
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "name": name,
            "description": description,
            "directory": directory,
            "validation": validation,
        }
    )

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a ReferenceType from a JSON-style dict; leftover keys become extras."""
    from ..models.reference_type_validation_item import ReferenceTypeValidationItem

    # Copy so pops don't mutate the caller's mapping.
    d = dict(src_dict)
    name = d.pop("name")

    description = d.pop("description")

    directory = d.pop("directory")

    validation = []
    _validation = d.pop("validation")
    for validation_item_data in _validation:
        validation_item = ReferenceTypeValidationItem.from_dict(validation_item_data)

        validation.append(validation_item)

    reference_type = cls(
        name=name,
        description=description,
        directory=directory,
        validation=validation,
    )

    # Whatever keys remain are unrecognized; keep them verbatim.
    reference_type.additional_properties = d
    return reference_type
@_attrs_define
class ReferenceTypeValidationItem:
    """ """

    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        # No declared fields: the serialized form is simply a copy of the extras.
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        # Every incoming key is kept verbatim as a pass-through property.
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of the pass-through keys held by this item."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
class RepositoryType(str, Enum):
    AWS = "AWS"
    GITHUB_PRIVATE = "GITHUB_PRIVATE"
    GITHUB_PUBLIC = "GITHUB_PUBLIC"
    NONE = "NONE"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Members are str subclasses, so the wire value is the member value.
        return self.value

    @classmethod
    def _missing_(cls, number):
        # Unrecognized inputs fall back to the UNKNOWN sentinel instead of
        # raising ValueError, keeping deserialization forward-compatible.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
class RequestStatus(str, Enum):
    ACCEPTED = "ACCEPTED"
    DENIED = "DENIED"
    PENDING = "PENDING"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Members are str subclasses, so the wire value is the member value.
        return self.value

    @classmethod
    def _missing_(cls, number):
        # Unrecognized inputs fall back to the UNKNOWN sentinel instead of
        # raising ValueError, keeping deserialization forward-compatible.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class RequirementFulfillmentInput:
    """
    Attributes:
        file (None | str | Unset):
        completed_on (datetime.datetime | None | Unset): If not provided, defaults to the current instant
    """

    file: None | str | Unset = UNSET
    completed_on: datetime.datetime | None | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict.

        Unset fields are omitted from the payload; None is kept (explicit
        null) and datetimes are serialized as ISO-8601 strings.
        """
        file: None | str | Unset
        if isinstance(self.file, Unset):
            file = UNSET
        else:
            file = self.file

        completed_on: None | str | Unset
        if isinstance(self.completed_on, Unset):
            completed_on = UNSET
        elif isinstance(self.completed_on, datetime.datetime):
            completed_on = self.completed_on.isoformat()
        else:
            completed_on = self.completed_on

        field_dict: dict[str, Any] = {}
        # Extras first, declared fields second: known keys win on collision.
        field_dict.update(self.additional_properties)
        field_dict.update({})
        if file is not UNSET:
            field_dict["file"] = file
        if completed_on is not UNSET:
            field_dict["completedOn"] = completed_on

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-style dict; leftover keys become extras."""
        d = dict(src_dict)

        def _parse_file(data: object) -> None | str | Unset:
            # None / Unset pass through untouched; anything else is taken as str.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        file = _parse_file(d.pop("file", UNSET))

        def _parse_completed_on(data: object) -> datetime.datetime | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                # Try the datetime arm of the union first; fall through to
                # returning the raw value if it is not an ISO-8601 string.
                if not isinstance(data, str):
                    raise TypeError()
                completed_on_type_0 = isoparse(data)

                return completed_on_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(datetime.datetime | None | Unset, data)

        completed_on = _parse_completed_on(d.pop("completedOn", UNSET))

        requirement_fulfillment_input = cls(
            file=file,
            completed_on=completed_on,
        )

        # Whatever keys remain are unrecognized; keep them verbatim.
        requirement_fulfillment_input.additional_properties = d
        return requirement_fulfillment_input

    @property
    def additional_keys(self) -> list[str]:
        # Names of the pass-through keys captured during deserialization.
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- file (None | str | Unset):
- completed_on (datetime.datetime | None | Unset): If not provided, defaults to the current instant
def __init__(self, file=attr_dict['file'].default, completed_on=attr_dict['completed_on'].default):
    """attrs-generated constructor rendering for ``RequirementFulfillmentInput``.

    Defaults are taken from the attrs field definitions (UNSET sentinels);
    the pass-through ``additional_properties`` dict starts empty.
    """
    self.file = file
    self.completed_on = completed_on
    # attrs-internal factory call; produces the empty extras dict.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class RequirementFulfillmentInput.
def to_dict(self) -> dict[str, Any]:
    """Serialize to a JSON-ready dict; Unset fields are omitted entirely."""
    file: None | str | Unset
    if isinstance(self.file, Unset):
        file = UNSET
    else:
        file = self.file

    completed_on: None | str | Unset
    if isinstance(self.completed_on, Unset):
        completed_on = UNSET
    elif isinstance(self.completed_on, datetime.datetime):
        # Datetimes go over the wire as ISO-8601 strings.
        completed_on = self.completed_on.isoformat()
    else:
        completed_on = self.completed_on

    field_dict: dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update({})
    if file is not UNSET:
        field_dict["file"] = file
    if completed_on is not UNSET:
        field_dict["completedOn"] = completed_on

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build an instance from a JSON-style dict; leftover keys become extras."""
    d = dict(src_dict)

    def _parse_file(data: object) -> None | str | Unset:
        # None / Unset pass through untouched; anything else is taken as str.
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(None | str | Unset, data)

    file = _parse_file(d.pop("file", UNSET))

    def _parse_completed_on(data: object) -> datetime.datetime | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            # Try the datetime arm of the union first; fall through to the
            # raw value when it is not an ISO-8601 string.
            if not isinstance(data, str):
                raise TypeError()
            completed_on_type_0 = isoparse(data)

            return completed_on_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(datetime.datetime | None | Unset, data)

    completed_on = _parse_completed_on(d.pop("completedOn", UNSET))

    requirement_fulfillment_input = cls(
        file=file,
        completed_on=completed_on,
    )

    # Whatever keys remain are unrecognized; keep them verbatim.
    requirement_fulfillment_input.additional_properties = d
    return requirement_fulfillment_input
@_attrs_define
class RequirementInput:
    """
    Attributes:
        name (str):
        description (str):
        type_ (GovernanceType): The types of governance requirements that can be enforced
        scope (GovernanceScope): The levels at which governance requirements can be enforced
        contact_ids (list[str]):
        expiration (GovernanceExpiry):
        project_id (None | str | Unset):
        acceptance (GovernanceScope | None | Unset):
        enactment_date (datetime.datetime | None | Unset):
        supplemental_docs (list[GovernanceFile] | None | Unset):
        file (GovernanceFile | None | Unset):
        authorship (GovernanceScope | None | Unset):
        verification_method (GovernanceTrainingVerification | None | Unset):
    """

    name: str
    description: str
    type_: GovernanceType
    scope: GovernanceScope
    contact_ids: list[str]
    expiration: GovernanceExpiry
    project_id: None | str | Unset = UNSET
    acceptance: GovernanceScope | None | Unset = UNSET
    enactment_date: datetime.datetime | None | Unset = UNSET
    supplemental_docs: list[GovernanceFile] | None | Unset = UNSET
    file: GovernanceFile | None | Unset = UNSET
    authorship: GovernanceScope | None | Unset = UNSET
    verification_method: GovernanceTrainingVerification | None | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict.

        Required fields are always emitted; optional fields are emitted only
        when not Unset. Enums become their values, nested models serialize
        via their own to_dict, datetimes become ISO-8601 strings.
        """
        from ..models.governance_file import GovernanceFile

        name = self.name

        description = self.description

        type_ = self.type_.value

        scope = self.scope.value

        contact_ids = self.contact_ids

        expiration = self.expiration.to_dict()

        project_id: None | str | Unset
        if isinstance(self.project_id, Unset):
            project_id = UNSET
        else:
            project_id = self.project_id

        acceptance: None | str | Unset
        if isinstance(self.acceptance, Unset):
            acceptance = UNSET
        elif isinstance(self.acceptance, GovernanceScope):
            acceptance = self.acceptance.value
        else:
            acceptance = self.acceptance

        enactment_date: None | str | Unset
        if isinstance(self.enactment_date, Unset):
            enactment_date = UNSET
        elif isinstance(self.enactment_date, datetime.datetime):
            enactment_date = self.enactment_date.isoformat()
        else:
            enactment_date = self.enactment_date

        supplemental_docs: list[dict[str, Any]] | None | Unset
        if isinstance(self.supplemental_docs, Unset):
            supplemental_docs = UNSET
        elif isinstance(self.supplemental_docs, list):
            supplemental_docs = []
            for supplemental_docs_type_0_item_data in self.supplemental_docs:
                supplemental_docs_type_0_item = supplemental_docs_type_0_item_data.to_dict()
                supplemental_docs.append(supplemental_docs_type_0_item)

        else:
            supplemental_docs = self.supplemental_docs

        file: dict[str, Any] | None | Unset
        if isinstance(self.file, Unset):
            file = UNSET
        elif isinstance(self.file, GovernanceFile):
            file = self.file.to_dict()
        else:
            file = self.file

        authorship: None | str | Unset
        if isinstance(self.authorship, Unset):
            authorship = UNSET
        elif isinstance(self.authorship, GovernanceScope):
            authorship = self.authorship.value
        else:
            authorship = self.authorship

        verification_method: None | str | Unset
        if isinstance(self.verification_method, Unset):
            verification_method = UNSET
        elif isinstance(self.verification_method, GovernanceTrainingVerification):
            verification_method = self.verification_method.value
        else:
            verification_method = self.verification_method

        field_dict: dict[str, Any] = {}
        # Extras first, declared fields second: known keys win on collision.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "name": name,
                "description": description,
                "type": type_,
                "scope": scope,
                "contactIds": contact_ids,
                "expiration": expiration,
            }
        )
        if project_id is not UNSET:
            field_dict["projectId"] = project_id
        if acceptance is not UNSET:
            field_dict["acceptance"] = acceptance
        if enactment_date is not UNSET:
            field_dict["enactmentDate"] = enactment_date
        if supplemental_docs is not UNSET:
            field_dict["supplementalDocs"] = supplemental_docs
        if file is not UNSET:
            field_dict["file"] = file
        if authorship is not UNSET:
            field_dict["authorship"] = authorship
        if verification_method is not UNSET:
            field_dict["verificationMethod"] = verification_method

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-style dict; leftover keys become extras.

        Each optional union field has a local ``_parse_*`` helper that tries
        the typed arm first and falls back to the raw value — the try/except
        ordering of these helpers is load-bearing.
        """
        from ..models.governance_expiry import GovernanceExpiry
        from ..models.governance_file import GovernanceFile

        d = dict(src_dict)
        name = d.pop("name")

        description = d.pop("description")

        type_ = GovernanceType(d.pop("type"))

        scope = GovernanceScope(d.pop("scope"))

        contact_ids = cast(list[str], d.pop("contactIds"))

        expiration = GovernanceExpiry.from_dict(d.pop("expiration"))

        def _parse_project_id(data: object) -> None | str | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        project_id = _parse_project_id(d.pop("projectId", UNSET))

        def _parse_acceptance(data: object) -> GovernanceScope | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                acceptance_type_1 = GovernanceScope(data)

                return acceptance_type_1
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(GovernanceScope | None | Unset, data)

        acceptance = _parse_acceptance(d.pop("acceptance", UNSET))

        def _parse_enactment_date(data: object) -> datetime.datetime | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                enactment_date_type_0 = isoparse(data)

                return enactment_date_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(datetime.datetime | None | Unset, data)

        enactment_date = _parse_enactment_date(d.pop("enactmentDate", UNSET))

        def _parse_supplemental_docs(data: object) -> list[GovernanceFile] | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                supplemental_docs_type_0 = []
                _supplemental_docs_type_0 = data
                for supplemental_docs_type_0_item_data in _supplemental_docs_type_0:
                    supplemental_docs_type_0_item = GovernanceFile.from_dict(supplemental_docs_type_0_item_data)

                    supplemental_docs_type_0.append(supplemental_docs_type_0_item)

                return supplemental_docs_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(list[GovernanceFile] | None | Unset, data)

        supplemental_docs = _parse_supplemental_docs(d.pop("supplementalDocs", UNSET))

        def _parse_file(data: object) -> GovernanceFile | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                file_type_1 = GovernanceFile.from_dict(data)

                return file_type_1
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(GovernanceFile | None | Unset, data)

        file = _parse_file(d.pop("file", UNSET))

        def _parse_authorship(data: object) -> GovernanceScope | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                authorship_type_1 = GovernanceScope(data)

                return authorship_type_1
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(GovernanceScope | None | Unset, data)

        authorship = _parse_authorship(d.pop("authorship", UNSET))

        def _parse_verification_method(data: object) -> GovernanceTrainingVerification | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                verification_method_type_1 = GovernanceTrainingVerification(data)

                return verification_method_type_1
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(GovernanceTrainingVerification | None | Unset, data)

        verification_method = _parse_verification_method(d.pop("verificationMethod", UNSET))

        requirement_input = cls(
            name=name,
            description=description,
            type_=type_,
            scope=scope,
            contact_ids=contact_ids,
            expiration=expiration,
            project_id=project_id,
            acceptance=acceptance,
            enactment_date=enactment_date,
            supplemental_docs=supplemental_docs,
            file=file,
            authorship=authorship,
            verification_method=verification_method,
        )

        # Whatever keys remain are unrecognized; keep them verbatim.
        requirement_input.additional_properties = d
        return requirement_input

    @property
    def additional_keys(self) -> list[str]:
        # Names of the pass-through keys captured during deserialization.
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- description (str):
- type_ (GovernanceType): The types of governance requirements that can be enforced
- scope (GovernanceScope): The levels at which governance requirements can be enforced
- contact_ids (list[str]):
- expiration (GovernanceExpiry):
- project_id (None | str | Unset):
- acceptance (GovernanceScope | None | Unset):
- enactment_date (datetime.datetime | None | Unset):
- supplemental_docs (list[GovernanceFile] | None | Unset):
- file (GovernanceFile | None | Unset):
- authorship (GovernanceScope | None | Unset):
- verification_method (GovernanceTrainingVerification | None | Unset):
def __init__(self, name, description, type_, scope, contact_ids, expiration, project_id=attr_dict['project_id'].default, acceptance=attr_dict['acceptance'].default, enactment_date=attr_dict['enactment_date'].default, supplemental_docs=attr_dict['supplemental_docs'].default, file=attr_dict['file'].default, authorship=attr_dict['authorship'].default, verification_method=attr_dict['verification_method'].default):
    """attrs-generated constructor rendering for ``RequirementInput``.

    Required fields come first; optional fields default to the attrs field
    defaults (UNSET sentinels). The pass-through extras dict starts empty.
    """
    self.name = name
    self.description = description
    self.type_ = type_
    self.scope = scope
    self.contact_ids = contact_ids
    self.expiration = expiration
    self.project_id = project_id
    self.acceptance = acceptance
    self.enactment_date = enactment_date
    self.supplemental_docs = supplemental_docs
    self.file = file
    self.authorship = authorship
    self.verification_method = verification_method
    # attrs-internal factory call; produces the empty extras dict.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class RequirementInput.
def to_dict(self) -> dict[str, Any]:
    """Serialize to a JSON-ready dict; optional Unset fields are omitted."""
    from ..models.governance_file import GovernanceFile

    name = self.name

    description = self.description

    # Enum fields go over the wire as their string values.
    type_ = self.type_.value

    scope = self.scope.value

    contact_ids = self.contact_ids

    expiration = self.expiration.to_dict()

    project_id: None | str | Unset
    if isinstance(self.project_id, Unset):
        project_id = UNSET
    else:
        project_id = self.project_id

    acceptance: None | str | Unset
    if isinstance(self.acceptance, Unset):
        acceptance = UNSET
    elif isinstance(self.acceptance, GovernanceScope):
        acceptance = self.acceptance.value
    else:
        acceptance = self.acceptance

    enactment_date: None | str | Unset
    if isinstance(self.enactment_date, Unset):
        enactment_date = UNSET
    elif isinstance(self.enactment_date, datetime.datetime):
        enactment_date = self.enactment_date.isoformat()
    else:
        enactment_date = self.enactment_date

    supplemental_docs: list[dict[str, Any]] | None | Unset
    if isinstance(self.supplemental_docs, Unset):
        supplemental_docs = UNSET
    elif isinstance(self.supplemental_docs, list):
        supplemental_docs = []
        for supplemental_docs_type_0_item_data in self.supplemental_docs:
            supplemental_docs_type_0_item = supplemental_docs_type_0_item_data.to_dict()
            supplemental_docs.append(supplemental_docs_type_0_item)

    else:
        supplemental_docs = self.supplemental_docs

    file: dict[str, Any] | None | Unset
    if isinstance(self.file, Unset):
        file = UNSET
    elif isinstance(self.file, GovernanceFile):
        file = self.file.to_dict()
    else:
        file = self.file

    authorship: None | str | Unset
    if isinstance(self.authorship, Unset):
        authorship = UNSET
    elif isinstance(self.authorship, GovernanceScope):
        authorship = self.authorship.value
    else:
        authorship = self.authorship

    verification_method: None | str | Unset
    if isinstance(self.verification_method, Unset):
        verification_method = UNSET
    elif isinstance(self.verification_method, GovernanceTrainingVerification):
        verification_method = self.verification_method.value
    else:
        verification_method = self.verification_method

    field_dict: dict[str, Any] = {}
    # Extras first, declared fields second: known keys win on collision.
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "name": name,
            "description": description,
            "type": type_,
            "scope": scope,
            "contactIds": contact_ids,
            "expiration": expiration,
        }
    )
    if project_id is not UNSET:
        field_dict["projectId"] = project_id
    if acceptance is not UNSET:
        field_dict["acceptance"] = acceptance
    if enactment_date is not UNSET:
        field_dict["enactmentDate"] = enactment_date
    if supplemental_docs is not UNSET:
        field_dict["supplementalDocs"] = supplemental_docs
    if file is not UNSET:
        field_dict["file"] = file
    if authorship is not UNSET:
        field_dict["authorship"] = authorship
    if verification_method is not UNSET:
        field_dict["verificationMethod"] = verification_method

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a RequirementInput from a JSON-style dict; leftover keys become extras.

    Each optional union field has a local ``_parse_*`` helper that tries the
    typed arm first and falls back to the raw value on failure.
    """
    from ..models.governance_expiry import GovernanceExpiry
    from ..models.governance_file import GovernanceFile

    # Copy so pops don't mutate the caller's mapping.
    d = dict(src_dict)
    name = d.pop("name")

    description = d.pop("description")

    type_ = GovernanceType(d.pop("type"))

    scope = GovernanceScope(d.pop("scope"))

    contact_ids = cast(list[str], d.pop("contactIds"))

    expiration = GovernanceExpiry.from_dict(d.pop("expiration"))

    def _parse_project_id(data: object) -> None | str | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(None | str | Unset, data)

    project_id = _parse_project_id(d.pop("projectId", UNSET))

    def _parse_acceptance(data: object) -> GovernanceScope | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            acceptance_type_1 = GovernanceScope(data)

            return acceptance_type_1
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(GovernanceScope | None | Unset, data)

    acceptance = _parse_acceptance(d.pop("acceptance", UNSET))

    def _parse_enactment_date(data: object) -> datetime.datetime | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            enactment_date_type_0 = isoparse(data)

            return enactment_date_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(datetime.datetime | None | Unset, data)

    enactment_date = _parse_enactment_date(d.pop("enactmentDate", UNSET))

    def _parse_supplemental_docs(data: object) -> list[GovernanceFile] | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, list):
                raise TypeError()
            supplemental_docs_type_0 = []
            _supplemental_docs_type_0 = data
            for supplemental_docs_type_0_item_data in _supplemental_docs_type_0:
                supplemental_docs_type_0_item = GovernanceFile.from_dict(supplemental_docs_type_0_item_data)

                supplemental_docs_type_0.append(supplemental_docs_type_0_item)

            return supplemental_docs_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(list[GovernanceFile] | None | Unset, data)

    supplemental_docs = _parse_supplemental_docs(d.pop("supplementalDocs", UNSET))

    def _parse_file(data: object) -> GovernanceFile | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, dict):
                raise TypeError()
            file_type_1 = GovernanceFile.from_dict(data)

            return file_type_1
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(GovernanceFile | None | Unset, data)

    file = _parse_file(d.pop("file", UNSET))

    def _parse_authorship(data: object) -> GovernanceScope | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            authorship_type_1 = GovernanceScope(data)

            return authorship_type_1
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(GovernanceScope | None | Unset, data)

    authorship = _parse_authorship(d.pop("authorship", UNSET))

    def _parse_verification_method(data: object) -> GovernanceTrainingVerification | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            verification_method_type_1 = GovernanceTrainingVerification(data)

            return verification_method_type_1
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(GovernanceTrainingVerification | None | Unset, data)

    verification_method = _parse_verification_method(d.pop("verificationMethod", UNSET))

    requirement_input = cls(
        name=name,
        description=description,
        type_=type_,
        scope=scope,
        contact_ids=contact_ids,
        expiration=expiration,
        project_id=project_id,
        acceptance=acceptance,
        enactment_date=enactment_date,
        supplemental_docs=supplemental_docs,
        file=file,
        authorship=authorship,
        verification_method=verification_method,
    )

    # Whatever keys remain are unrecognized; keep them verbatim.
    requirement_input.additional_properties = d
    return requirement_input
@_attrs_define
class ResourcesInfo:
    """Version metadata for a resources repository checkout.

    Attributes:
        commit (str):
        date (datetime.datetime):
        repository (str):
        source_version (str):
    """

    commit: str
    date: datetime.datetime
    repository: str
    source_version: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; declared fields override extras."""
        payload: dict[str, Any] = dict(self.additional_properties)
        payload.update(
            {
                "commit": self.commit,
                "date": self.date.isoformat(),
                "repository": self.repository,
                "sourceVersion": self.source_version,
            }
        )
        return payload

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a JSON-style dict; leftover keys become extras."""
        data = dict(src_dict)
        info = cls(
            commit=data.pop("commit"),
            date=isoparse(data.pop("date")),
            repository=data.pop("repository"),
            source_version=data.pop("sourceVersion"),
        )
        # Remaining keys were not declared fields; preserve them verbatim.
        info.additional_properties = data
        return info

    @property
    def additional_keys(self) -> list[str]:
        """Names of the pass-through keys captured during deserialization."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- commit (str):
- date (datetime.datetime):
- repository (str):
- source_version (str):
def __init__(self, commit, date, repository, source_version):
    """attrs-generated constructor rendering for ``ResourcesInfo``.

    Stores each declared attribute as-is and starts the pass-through
    ``additional_properties`` dict empty.
    """
    self.commit = commit
    self.date = date
    self.repository = repository
    self.source_version = source_version
    # attrs-internal factory call; produces the empty extras dict.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ResourcesInfo.
def to_dict(self) -> dict[str, Any]:
    """Serialize ResourcesInfo to a plain dict.

    Extra keys from additional_properties are written first, then the four
    known fields (camelCase keys) overwrite any collisions. `date` is emitted
    as an ISO-8601 string.
    """
    commit = self.commit

    date = self.date.isoformat()

    repository = self.repository

    source_version = self.source_version

    field_dict: dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "commit": commit,
            "date": date,
            "repository": repository,
            "sourceVersion": source_version,
        }
    )

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Deserialize ResourcesInfo from a mapping.

    Known keys are popped off a copy of the input; whatever remains is stored
    verbatim in additional_properties. `date` is parsed from ISO-8601.
    """
    d = dict(src_dict)
    commit = d.pop("commit")

    date = isoparse(d.pop("date"))

    repository = d.pop("repository")

    source_version = d.pop("sourceVersion")

    resources_info = cls(
        commit=commit,
        date=date,
        repository=repository,
        source_version=source_version,
    )

    resources_info.additional_properties = d
    return resources_info
@_attrs_define
class RunAnalysisRequest:
    """
    Attributes:
        name (str): Name of the dataset
        process_id (str): Process ID of the workflow. Example: process-nf-core-rnaseq-3_8.
        source_dataset_ids (list[str]): These datasets contain files that are inputs to this workflow.
        params (RunAnalysisRequestParams): Parameters used in workflow (can be empty)
        notification_emails (list[str]): Emails to notify upon workflow success or failure
        description (None | str | Unset): Description of the dataset (optional)
        source_sample_ids (list[str] | None | Unset): Samples within the source datasets that will be used as inputs to
            this workflow. If not specified, all samples will be used.
        source_sample_files_map (None | RunAnalysisRequestSourceSampleFilesMap | Unset): Files containing samples used
            to define source data input to this workflow. If not specified, all files will be used. Keys are sampleIds, and
            the lists are file paths to include.
        resume_dataset_id (None | str | Unset): Used for caching task execution. If the parameters are the same as the
            dataset specified here, it will re-use the output to minimize duplicate work.
        compute_environment_id (None | str | Unset): The compute environment in which to run the workflow; if not
            specified, it will run in AWS.
    """

    name: str
    process_id: str
    source_dataset_ids: list[str]
    params: RunAnalysisRequestParams
    notification_emails: list[str]
    description: None | str | Unset = UNSET
    source_sample_ids: list[str] | None | Unset = UNSET
    source_sample_files_map: None | RunAnalysisRequestSourceSampleFilesMap | Unset = UNSET
    resume_dataset_id: None | str | Unset = UNSET
    compute_environment_id: None | str | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict.

        Required fields are always emitted (camelCase keys); optional fields
        are emitted only when they are not UNSET. Extra keys carried in
        additional_properties are written first, so known fields win on
        collision.
        """
        # Local import avoids a circular import between generated model modules.
        from ..models.run_analysis_request_source_sample_files_map import RunAnalysisRequestSourceSampleFilesMap

        name = self.name

        process_id = self.process_id

        source_dataset_ids = self.source_dataset_ids

        params = self.params.to_dict()

        notification_emails = self.notification_emails

        description: None | str | Unset
        if isinstance(self.description, Unset):
            description = UNSET
        else:
            description = self.description

        source_sample_ids: list[str] | None | Unset
        if isinstance(self.source_sample_ids, Unset):
            source_sample_ids = UNSET
        elif isinstance(self.source_sample_ids, list):
            source_sample_ids = self.source_sample_ids

        else:
            source_sample_ids = self.source_sample_ids

        source_sample_files_map: dict[str, Any] | None | Unset
        if isinstance(self.source_sample_files_map, Unset):
            source_sample_files_map = UNSET
        elif isinstance(self.source_sample_files_map, RunAnalysisRequestSourceSampleFilesMap):
            source_sample_files_map = self.source_sample_files_map.to_dict()
        else:
            source_sample_files_map = self.source_sample_files_map

        resume_dataset_id: None | str | Unset
        if isinstance(self.resume_dataset_id, Unset):
            resume_dataset_id = UNSET
        else:
            resume_dataset_id = self.resume_dataset_id

        compute_environment_id: None | str | Unset
        if isinstance(self.compute_environment_id, Unset):
            compute_environment_id = UNSET
        else:
            compute_environment_id = self.compute_environment_id

        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "name": name,
                "processId": process_id,
                "sourceDatasetIds": source_dataset_ids,
                "params": params,
                "notificationEmails": notification_emails,
            }
        )
        if description is not UNSET:
            field_dict["description"] = description
        if source_sample_ids is not UNSET:
            field_dict["sourceSampleIds"] = source_sample_ids
        if source_sample_files_map is not UNSET:
            field_dict["sourceSampleFilesMap"] = source_sample_files_map
        if resume_dataset_id is not UNSET:
            field_dict["resumeDatasetId"] = resume_dataset_id
        if compute_environment_id is not UNSET:
            field_dict["computeEnvironmentId"] = compute_environment_id

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping.

        Known keys are popped off a copy of the input; unconsumed keys become
        additional_properties. Each _parse_* helper tries the union members in
        order and falls back to passing the raw value through with a cast.
        """
        from ..models.run_analysis_request_params import RunAnalysisRequestParams
        from ..models.run_analysis_request_source_sample_files_map import RunAnalysisRequestSourceSampleFilesMap

        d = dict(src_dict)
        name = d.pop("name")

        process_id = d.pop("processId")

        source_dataset_ids = cast(list[str], d.pop("sourceDatasetIds"))

        params = RunAnalysisRequestParams.from_dict(d.pop("params"))

        notification_emails = cast(list[str], d.pop("notificationEmails"))

        def _parse_description(data: object) -> None | str | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        description = _parse_description(d.pop("description", UNSET))

        def _parse_source_sample_ids(data: object) -> list[str] | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                source_sample_ids_type_0 = cast(list[str], data)

                return source_sample_ids_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(list[str] | None | Unset, data)

        source_sample_ids = _parse_source_sample_ids(d.pop("sourceSampleIds", UNSET))

        def _parse_source_sample_files_map(data: object) -> None | RunAnalysisRequestSourceSampleFilesMap | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                source_sample_files_map_type_0 = RunAnalysisRequestSourceSampleFilesMap.from_dict(data)

                return source_sample_files_map_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(None | RunAnalysisRequestSourceSampleFilesMap | Unset, data)

        source_sample_files_map = _parse_source_sample_files_map(d.pop("sourceSampleFilesMap", UNSET))

        def _parse_resume_dataset_id(data: object) -> None | str | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        resume_dataset_id = _parse_resume_dataset_id(d.pop("resumeDatasetId", UNSET))

        def _parse_compute_environment_id(data: object) -> None | str | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(None | str | Unset, data)

        compute_environment_id = _parse_compute_environment_id(d.pop("computeEnvironmentId", UNSET))

        run_analysis_request = cls(
            name=name,
            process_id=process_id,
            source_dataset_ids=source_dataset_ids,
            params=params,
            notification_emails=notification_emails,
            description=description,
            source_sample_ids=source_sample_ids,
            source_sample_files_map=source_sample_files_map,
            resume_dataset_id=resume_dataset_id,
            compute_environment_id=compute_environment_id,
        )

        run_analysis_request.additional_properties = d
        return run_analysis_request

    @property
    def additional_keys(self) -> list[str]:
        # Names of the extra (schema-unknown) properties on this object.
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str): Name of the dataset
- process_id (str): Process ID of the workflow. Example: process-nf-core-rnaseq-3_8.
- source_dataset_ids (list[str]): These datasets contain files that are inputs to this workflow.
- params (RunAnalysisRequestParams): Parameters used in workflow (can be empty)
- notification_emails (list[str]): Emails to notify upon workflow success or failure
- description (None | str | Unset): Description of the dataset (optional)
- source_sample_ids (list[str] | None | Unset): Samples within the source datasets that will be used as inputs to this workflow. If not specified, all samples will be used.
- source_sample_files_map (None | RunAnalysisRequestSourceSampleFilesMap | Unset): Files containing samples used to define source data input to this workflow. If not specified, all files will be used. Keys are sampleIds, and the lists are file paths to include.
- resume_dataset_id (None | str | Unset): Used for caching task execution. If the parameters are the same as the dataset specified here, it will re-use the output to minimize duplicate work.
- compute_environment_id (None | str | Unset): The compute environment in which to run the workflow; if not specified, it will run in AWS.
def __init__(self, name, process_id, source_dataset_ids, params, notification_emails, description=attr_dict['description'].default, source_sample_ids=attr_dict['source_sample_ids'].default, source_sample_files_map=attr_dict['source_sample_files_map'].default, resume_dataset_id=attr_dict['resume_dataset_id'].default, compute_environment_id=attr_dict['compute_environment_id'].default):
    # Initializer generated by attrs for RunAnalysisRequest: optional fields
    # default to the attrs-recorded defaults (UNSET), and every instance gets
    # its own empty additional_properties dict via the attrs factory.
    self.name = name
    self.process_id = process_id
    self.source_dataset_ids = source_dataset_ids
    self.params = params
    self.notification_emails = notification_emails
    self.description = description
    self.source_sample_ids = source_sample_ids
    self.source_sample_files_map = source_sample_files_map
    self.resume_dataset_id = resume_dataset_id
    self.compute_environment_id = compute_environment_id
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class RunAnalysisRequest.
def to_dict(self) -> dict[str, Any]:
    """Serialize RunAnalysisRequest to a JSON-ready dict.

    Required fields are always emitted with camelCase keys; optional fields
    only when not UNSET. Extra keys from additional_properties are written
    first, so known fields win on collision.
    """
    # Local import avoids a circular import between generated model modules.
    from ..models.run_analysis_request_source_sample_files_map import RunAnalysisRequestSourceSampleFilesMap

    name = self.name

    process_id = self.process_id

    source_dataset_ids = self.source_dataset_ids

    params = self.params.to_dict()

    notification_emails = self.notification_emails

    description: None | str | Unset
    if isinstance(self.description, Unset):
        description = UNSET
    else:
        description = self.description

    source_sample_ids: list[str] | None | Unset
    if isinstance(self.source_sample_ids, Unset):
        source_sample_ids = UNSET
    elif isinstance(self.source_sample_ids, list):
        source_sample_ids = self.source_sample_ids

    else:
        source_sample_ids = self.source_sample_ids

    source_sample_files_map: dict[str, Any] | None | Unset
    if isinstance(self.source_sample_files_map, Unset):
        source_sample_files_map = UNSET
    elif isinstance(self.source_sample_files_map, RunAnalysisRequestSourceSampleFilesMap):
        source_sample_files_map = self.source_sample_files_map.to_dict()
    else:
        source_sample_files_map = self.source_sample_files_map

    resume_dataset_id: None | str | Unset
    if isinstance(self.resume_dataset_id, Unset):
        resume_dataset_id = UNSET
    else:
        resume_dataset_id = self.resume_dataset_id

    compute_environment_id: None | str | Unset
    if isinstance(self.compute_environment_id, Unset):
        compute_environment_id = UNSET
    else:
        compute_environment_id = self.compute_environment_id

    field_dict: dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "name": name,
            "processId": process_id,
            "sourceDatasetIds": source_dataset_ids,
            "params": params,
            "notificationEmails": notification_emails,
        }
    )
    if description is not UNSET:
        field_dict["description"] = description
    if source_sample_ids is not UNSET:
        field_dict["sourceSampleIds"] = source_sample_ids
    if source_sample_files_map is not UNSET:
        field_dict["sourceSampleFilesMap"] = source_sample_files_map
    if resume_dataset_id is not UNSET:
        field_dict["resumeDatasetId"] = resume_dataset_id
    if compute_environment_id is not UNSET:
        field_dict["computeEnvironmentId"] = compute_environment_id

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Deserialize RunAnalysisRequest from a mapping.

    Known keys are popped off a copy of the input; unconsumed keys become
    additional_properties. The _parse_* helpers try each union member in
    order, falling back to passing the raw value through with a cast.
    """
    from ..models.run_analysis_request_params import RunAnalysisRequestParams
    from ..models.run_analysis_request_source_sample_files_map import RunAnalysisRequestSourceSampleFilesMap

    d = dict(src_dict)
    name = d.pop("name")

    process_id = d.pop("processId")

    source_dataset_ids = cast(list[str], d.pop("sourceDatasetIds"))

    params = RunAnalysisRequestParams.from_dict(d.pop("params"))

    notification_emails = cast(list[str], d.pop("notificationEmails"))

    def _parse_description(data: object) -> None | str | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(None | str | Unset, data)

    description = _parse_description(d.pop("description", UNSET))

    def _parse_source_sample_ids(data: object) -> list[str] | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, list):
                raise TypeError()
            source_sample_ids_type_0 = cast(list[str], data)

            return source_sample_ids_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(list[str] | None | Unset, data)

    source_sample_ids = _parse_source_sample_ids(d.pop("sourceSampleIds", UNSET))

    def _parse_source_sample_files_map(data: object) -> None | RunAnalysisRequestSourceSampleFilesMap | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, dict):
                raise TypeError()
            source_sample_files_map_type_0 = RunAnalysisRequestSourceSampleFilesMap.from_dict(data)

            return source_sample_files_map_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(None | RunAnalysisRequestSourceSampleFilesMap | Unset, data)

    source_sample_files_map = _parse_source_sample_files_map(d.pop("sourceSampleFilesMap", UNSET))

    def _parse_resume_dataset_id(data: object) -> None | str | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(None | str | Unset, data)

    resume_dataset_id = _parse_resume_dataset_id(d.pop("resumeDatasetId", UNSET))

    def _parse_compute_environment_id(data: object) -> None | str | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(None | str | Unset, data)

    compute_environment_id = _parse_compute_environment_id(d.pop("computeEnvironmentId", UNSET))

    run_analysis_request = cls(
        name=name,
        process_id=process_id,
        source_dataset_ids=source_dataset_ids,
        params=params,
        notification_emails=notification_emails,
        description=description,
        source_sample_ids=source_sample_ids,
        source_sample_files_map=source_sample_files_map,
        resume_dataset_id=resume_dataset_id,
        compute_environment_id=compute_environment_id,
    )

    run_analysis_request.additional_properties = d
    return run_analysis_request
@_attrs_define
class RunAnalysisRequestParams:
    """Parameters used in workflow (can be empty)"""

    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize: every stored key/value is emitted as-is."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize: the whole mapping becomes additional_properties."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of the stored parameters."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Parameters used in workflow (can be empty)
@_attrs_define
class RunAnalysisRequestSourceSampleFilesMap:
    """Files containing samples used to define source data input to this workflow. If not specified, all files will be
    used. Keys are sampleIds, and the lists are file paths to include.

    """

    additional_properties: dict[str, list[str]] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize: keys are sample IDs, values are file-path lists, copied as-is."""
        return {sample_id: paths for sample_id, paths in self.additional_properties.items()}

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize: every entry is taken as a sampleId -> list-of-paths pair."""
        instance = cls()
        instance.additional_properties = {
            sample_id: cast(list[str], paths) for sample_id, paths in dict(src_dict).items()
        }
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Sample IDs present in the map."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> list[str]:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: list[str]) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Files containing samples used to define source data input to this workflow. If not specified, all files will be used. Keys are sampleIds, and the lists are file paths to include.
Method generated by attrs for class RunAnalysisRequestSourceSampleFilesMap.
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Deserialize RunAnalysisRequestSourceSampleFilesMap from a mapping.

    Every entry is treated as a sampleId -> list-of-file-paths pair; the cast
    is a static-typing assertion only and does no runtime conversion.
    """
    d = dict(src_dict)
    run_analysis_request_source_sample_files_map = cls()

    additional_properties = {}
    for prop_name, prop_dict in d.items():
        additional_property = cast(list[str], prop_dict)

        additional_properties[prop_name] = additional_property

    run_analysis_request_source_sample_files_map.additional_properties = additional_properties
    return run_analysis_request_source_sample_files_map
@_attrs_define
class Sample:
    """
    Attributes:
        id (str):
        name (str):
        metadata (None | SampleMetadata | Unset):
        files (list[DataFile] | None | Unset): Files associated with this sample
        dataset_ids (list[str] | None | Unset):
        created_at (datetime.datetime | None | Unset):
        updated_at (datetime.datetime | None | Unset):
    """

    id: str
    name: str
    metadata: None | SampleMetadata | Unset = UNSET
    files: list[DataFile] | None | Unset = UNSET
    dataset_ids: list[str] | None | Unset = UNSET
    created_at: datetime.datetime | None | Unset = UNSET
    updated_at: datetime.datetime | None | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict.

        `id` and `name` are always emitted; optional fields only when not
        UNSET. Datetimes become ISO-8601 strings; nested models are serialized
        via their own to_dict. Extra keys from additional_properties are
        written first, so known fields win on collision.
        """
        # Local import avoids a circular import between generated model modules.
        from ..models.sample_metadata import SampleMetadata

        id = self.id

        name = self.name

        metadata: dict[str, Any] | None | Unset
        if isinstance(self.metadata, Unset):
            metadata = UNSET
        elif isinstance(self.metadata, SampleMetadata):
            metadata = self.metadata.to_dict()
        else:
            metadata = self.metadata

        files: list[dict[str, Any]] | None | Unset
        if isinstance(self.files, Unset):
            files = UNSET
        elif isinstance(self.files, list):
            files = []
            for files_type_0_item_data in self.files:
                files_type_0_item = files_type_0_item_data.to_dict()
                files.append(files_type_0_item)

        else:
            files = self.files

        dataset_ids: list[str] | None | Unset
        if isinstance(self.dataset_ids, Unset):
            dataset_ids = UNSET
        elif isinstance(self.dataset_ids, list):
            dataset_ids = self.dataset_ids

        else:
            dataset_ids = self.dataset_ids

        created_at: None | str | Unset
        if isinstance(self.created_at, Unset):
            created_at = UNSET
        elif isinstance(self.created_at, datetime.datetime):
            created_at = self.created_at.isoformat()
        else:
            created_at = self.created_at

        updated_at: None | str | Unset
        if isinstance(self.updated_at, Unset):
            updated_at = UNSET
        elif isinstance(self.updated_at, datetime.datetime):
            updated_at = self.updated_at.isoformat()
        else:
            updated_at = self.updated_at

        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
            }
        )
        if metadata is not UNSET:
            field_dict["metadata"] = metadata
        if files is not UNSET:
            field_dict["files"] = files
        if dataset_ids is not UNSET:
            field_dict["datasetIds"] = dataset_ids
        if created_at is not UNSET:
            field_dict["createdAt"] = created_at
        if updated_at is not UNSET:
            field_dict["updatedAt"] = updated_at

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping.

        Known keys are popped off a copy of the input; unconsumed keys become
        additional_properties. The _parse_* helpers try each union member in
        order, falling back to passing the raw value through with a cast.
        """
        from ..models.data_file import DataFile
        from ..models.sample_metadata import SampleMetadata

        d = dict(src_dict)
        id = d.pop("id")

        name = d.pop("name")

        def _parse_metadata(data: object) -> None | SampleMetadata | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                metadata_type_0 = SampleMetadata.from_dict(data)

                return metadata_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(None | SampleMetadata | Unset, data)

        metadata = _parse_metadata(d.pop("metadata", UNSET))

        def _parse_files(data: object) -> list[DataFile] | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                files_type_0 = []
                _files_type_0 = data
                for files_type_0_item_data in _files_type_0:
                    files_type_0_item = DataFile.from_dict(files_type_0_item_data)

                    files_type_0.append(files_type_0_item)

                return files_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(list[DataFile] | None | Unset, data)

        files = _parse_files(d.pop("files", UNSET))

        def _parse_dataset_ids(data: object) -> list[str] | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                dataset_ids_type_0 = cast(list[str], data)

                return dataset_ids_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(list[str] | None | Unset, data)

        dataset_ids = _parse_dataset_ids(d.pop("datasetIds", UNSET))

        def _parse_created_at(data: object) -> datetime.datetime | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                created_at_type_0 = isoparse(data)

                return created_at_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(datetime.datetime | None | Unset, data)

        created_at = _parse_created_at(d.pop("createdAt", UNSET))

        def _parse_updated_at(data: object) -> datetime.datetime | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                updated_at_type_0 = isoparse(data)

                return updated_at_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(datetime.datetime | None | Unset, data)

        updated_at = _parse_updated_at(d.pop("updatedAt", UNSET))

        sample = cls(
            id=id,
            name=name,
            metadata=metadata,
            files=files,
            dataset_ids=dataset_ids,
            created_at=created_at,
            updated_at=updated_at,
        )

        sample.additional_properties = d
        return sample

    @property
    def additional_keys(self) -> list[str]:
        # Names of the extra (schema-unknown) properties on this object.
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- metadata (None | SampleMetadata | Unset):
- files (list[DataFile] | None | Unset): Files associated with this sample
- dataset_ids (list[str] | None | Unset):
- created_at (datetime.datetime | None | Unset):
- updated_at (datetime.datetime | None | Unset):
def __init__(self, id, name, metadata=attr_dict['metadata'].default, files=attr_dict['files'].default, dataset_ids=attr_dict['dataset_ids'].default, created_at=attr_dict['created_at'].default, updated_at=attr_dict['updated_at'].default):
    # Initializer generated by attrs for Sample: optional fields default to the
    # attrs-recorded defaults (UNSET), and every instance gets its own empty
    # additional_properties dict via the attrs factory.
    self.id = id
    self.name = name
    self.metadata = metadata
    self.files = files
    self.dataset_ids = dataset_ids
    self.created_at = created_at
    self.updated_at = updated_at
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Sample.
def to_dict(self) -> dict[str, Any]:
    """Serialize Sample to a JSON-ready dict.

    `id` and `name` are always emitted; optional fields only when not UNSET.
    Datetimes become ISO-8601 strings; nested models serialize via their own
    to_dict. Extra keys from additional_properties are written first, so known
    fields win on collision.
    """
    # Local import avoids a circular import between generated model modules.
    from ..models.sample_metadata import SampleMetadata

    id = self.id

    name = self.name

    metadata: dict[str, Any] | None | Unset
    if isinstance(self.metadata, Unset):
        metadata = UNSET
    elif isinstance(self.metadata, SampleMetadata):
        metadata = self.metadata.to_dict()
    else:
        metadata = self.metadata

    files: list[dict[str, Any]] | None | Unset
    if isinstance(self.files, Unset):
        files = UNSET
    elif isinstance(self.files, list):
        files = []
        for files_type_0_item_data in self.files:
            files_type_0_item = files_type_0_item_data.to_dict()
            files.append(files_type_0_item)

    else:
        files = self.files

    dataset_ids: list[str] | None | Unset
    if isinstance(self.dataset_ids, Unset):
        dataset_ids = UNSET
    elif isinstance(self.dataset_ids, list):
        dataset_ids = self.dataset_ids

    else:
        dataset_ids = self.dataset_ids

    created_at: None | str | Unset
    if isinstance(self.created_at, Unset):
        created_at = UNSET
    elif isinstance(self.created_at, datetime.datetime):
        created_at = self.created_at.isoformat()
    else:
        created_at = self.created_at

    updated_at: None | str | Unset
    if isinstance(self.updated_at, Unset):
        updated_at = UNSET
    elif isinstance(self.updated_at, datetime.datetime):
        updated_at = self.updated_at.isoformat()
    else:
        updated_at = self.updated_at

    field_dict: dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "id": id,
            "name": name,
        }
    )
    if metadata is not UNSET:
        field_dict["metadata"] = metadata
    if files is not UNSET:
        field_dict["files"] = files
    if dataset_ids is not UNSET:
        field_dict["datasetIds"] = dataset_ids
    if created_at is not UNSET:
        field_dict["createdAt"] = created_at
    if updated_at is not UNSET:
        field_dict["updatedAt"] = updated_at

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Deserialize Sample from a mapping.

    Known keys are popped off a copy of the input; unconsumed keys become
    additional_properties. The _parse_* helpers try each union member in
    order, falling back to passing the raw value through with a cast.
    """
    from ..models.data_file import DataFile
    from ..models.sample_metadata import SampleMetadata

    d = dict(src_dict)
    id = d.pop("id")

    name = d.pop("name")

    def _parse_metadata(data: object) -> None | SampleMetadata | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, dict):
                raise TypeError()
            metadata_type_0 = SampleMetadata.from_dict(data)

            return metadata_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(None | SampleMetadata | Unset, data)

    metadata = _parse_metadata(d.pop("metadata", UNSET))

    def _parse_files(data: object) -> list[DataFile] | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, list):
                raise TypeError()
            files_type_0 = []
            _files_type_0 = data
            for files_type_0_item_data in _files_type_0:
                files_type_0_item = DataFile.from_dict(files_type_0_item_data)

                files_type_0.append(files_type_0_item)

            return files_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(list[DataFile] | None | Unset, data)

    files = _parse_files(d.pop("files", UNSET))

    def _parse_dataset_ids(data: object) -> list[str] | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, list):
                raise TypeError()
            dataset_ids_type_0 = cast(list[str], data)

            return dataset_ids_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(list[str] | None | Unset, data)

    dataset_ids = _parse_dataset_ids(d.pop("datasetIds", UNSET))

    def _parse_created_at(data: object) -> datetime.datetime | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            created_at_type_0 = isoparse(data)

            return created_at_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(datetime.datetime | None | Unset, data)

    created_at = _parse_created_at(d.pop("createdAt", UNSET))

    def _parse_updated_at(data: object) -> datetime.datetime | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            updated_at_type_0 = isoparse(data)

            return updated_at_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(datetime.datetime | None | Unset, data)

    updated_at = _parse_updated_at(d.pop("updatedAt", UNSET))

    sample = cls(
        id=id,
        name=name,
        metadata=metadata,
        files=files,
        dataset_ids=dataset_ids,
        created_at=created_at,
        updated_at=updated_at,
    )

    sample.additional_properties = d
    return sample
@_attrs_define
class SampleMetadata:
    """ """

    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize: every stored key/value is emitted as-is."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize: the whole mapping becomes additional_properties."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of the stored metadata entries."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@_attrs_define
class SampleRequest:
    """
    Attributes:
        name (str):
        metadata (SampleRequestMetadata):
    """

    name: str
    metadata: SampleRequestMetadata
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict; declared fields override extras of the same name."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["name"] = self.name
        serialized["metadata"] = self.metadata.to_dict()
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize, keeping unrecognized keys as additional properties."""
        from ..models.sample_request_metadata import SampleRequestMetadata

        payload = dict(src_dict)
        instance = cls(
            name=payload.pop("name"),
            metadata=SampleRequestMetadata.from_dict(payload.pop("metadata")),
        )
        instance.additional_properties = payload
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) properties."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- metadata (SampleRequestMetadata):
25def __init__(self, name, metadata): 26 self.name = name 27 self.metadata = metadata 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class SampleRequest.
45 @classmethod 46 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 47 from ..models.sample_request_metadata import SampleRequestMetadata 48 49 d = dict(src_dict) 50 name = d.pop("name") 51 52 metadata = SampleRequestMetadata.from_dict(d.pop("metadata")) 53 54 sample_request = cls( 55 name=name, 56 metadata=metadata, 57 ) 58 59 sample_request.additional_properties = d 60 return sample_request
@_attrs_define
class SampleRequestMetadata:
    """Arbitrary metadata payload; all keys live in ``additional_properties``."""

    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of the stored properties."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Construct an instance holding every incoming key unchanged."""
        metadata = cls()
        metadata.additional_properties = dict(src_dict)
        return metadata

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored properties."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@_attrs_define
class SampleSheets:
    """
    Attributes:
        samples (str | Unset): Written to samplesheet.csv, available as ds.samplesheet in preprocess
        files (str | Unset): Written to files.csv, available as ds.files in preprocess
    """

    samples: str | Unset = UNSET
    files: str | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict, omitting fields that were never set."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        if self.samples is not UNSET:
            serialized["samples"] = self.samples
        if self.files is not UNSET:
            serialized["files"] = self.files
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize; missing keys stay UNSET, extra keys become additional properties."""
        payload = dict(src_dict)
        sheets = cls(
            samples=payload.pop("samples", UNSET),
            files=payload.pop("files", UNSET),
        )
        sheets.additional_properties = payload
        return sheets

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) properties."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- samples (str | Unset): Written to samplesheet.csv, available as ds.samplesheet in preprocess
- files (str | Unset): Written to files.csv, available as ds.files in preprocess
25def __init__(self, samples=attr_dict['samples'].default, files=attr_dict['files'].default): 26 self.samples = samples 27 self.files = files 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class SampleSheets.
27 def to_dict(self) -> dict[str, Any]: 28 samples = self.samples 29 30 files = self.files 31 32 field_dict: dict[str, Any] = {} 33 field_dict.update(self.additional_properties) 34 field_dict.update({}) 35 if samples is not UNSET: 36 field_dict["samples"] = samples 37 if files is not UNSET: 38 field_dict["files"] = files 39 40 return field_dict
42 @classmethod 43 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 44 d = dict(src_dict) 45 samples = d.pop("samples", UNSET) 46 47 files = d.pop("files", UNSET) 48 49 sample_sheets = cls( 50 samples=samples, 51 files=files, 52 ) 53 54 sample_sheets.additional_properties = d 55 return sample_sheets
@_attrs_define
class ServiceConnection:
    """
    Attributes:
        name (str):
        description (str):
    """

    name: str
    description: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict; declared fields override extras of the same name."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["name"] = self.name
        serialized["description"] = self.description
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize, keeping unrecognized keys as additional properties."""
        payload = dict(src_dict)
        connection = cls(
            name=payload.pop("name"),
            description=payload.pop("description"),
        )
        connection.additional_properties = payload
        return connection

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) properties."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- description (str):
25def __init__(self, name, description): 26 self.name = name 27 self.description = description 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ServiceConnection.
41 @classmethod 42 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 43 d = dict(src_dict) 44 name = d.pop("name") 45 46 description = d.pop("description") 47 48 service_connection = cls( 49 name=name, 50 description=description, 51 ) 52 53 service_connection.additional_properties = d 54 return service_connection
@_attrs_define
class SetUserProjectRoleRequest:
    """
    Attributes:
        username (str):
        role (ProjectRole):
        suppress_notification (bool | Unset): Default: False.
    """

    username: str
    role: ProjectRole
    suppress_notification: bool | Unset = False
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict; the role is emitted as its enum value."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["username"] = self.username
        serialized["role"] = self.role.value
        if self.suppress_notification is not UNSET:
            serialized["suppressNotification"] = self.suppress_notification
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize, keeping unrecognized keys as additional properties."""
        payload = dict(src_dict)
        request = cls(
            username=payload.pop("username"),
            role=ProjectRole(payload.pop("role")),
            suppress_notification=payload.pop("suppressNotification", UNSET),
        )
        request.additional_properties = payload
        return request

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) properties."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- username (str):
- role (ProjectRole):
- suppress_notification (bool | Unset): Default: False.
26def __init__(self, username, role, suppress_notification=attr_dict['suppress_notification'].default): 27 self.username = username 28 self.role = role 29 self.suppress_notification = suppress_notification 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class SetUserProjectRoleRequest.
30 def to_dict(self) -> dict[str, Any]: 31 username = self.username 32 33 role = self.role.value 34 35 suppress_notification = self.suppress_notification 36 37 field_dict: dict[str, Any] = {} 38 field_dict.update(self.additional_properties) 39 field_dict.update( 40 { 41 "username": username, 42 "role": role, 43 } 44 ) 45 if suppress_notification is not UNSET: 46 field_dict["suppressNotification"] = suppress_notification 47 48 return field_dict
50 @classmethod 51 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 52 d = dict(src_dict) 53 username = d.pop("username") 54 55 role = ProjectRole(d.pop("role")) 56 57 suppress_notification = d.pop("suppressNotification", UNSET) 58 59 set_user_project_role_request = cls( 60 username=username, 61 role=role, 62 suppress_notification=suppress_notification, 63 ) 64 65 set_user_project_role_request.additional_properties = d 66 return set_user_project_role_request
@_attrs_define
class SftpCredentials:
    """
    Attributes:
        username (str):
        password (str):
        project_id (str):
        expires_at (datetime.datetime):
    """

    username: str
    password: str
    project_id: str
    expires_at: datetime.datetime
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict; the expiry is emitted as an ISO-8601 string."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "username": self.username,
                "password": self.password,
                "projectId": self.project_id,
                "expiresAt": self.expires_at.isoformat(),
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize; ``expiresAt`` is parsed back into a datetime via isoparse."""
        payload = dict(src_dict)
        credentials = cls(
            username=payload.pop("username"),
            password=payload.pop("password"),
            project_id=payload.pop("projectId"),
            expires_at=isoparse(payload.pop("expiresAt")),
        )
        credentials.additional_properties = payload
        return credentials

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) properties."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- username (str):
- password (str):
- project_id (str):
- expires_at (datetime.datetime):
27def __init__(self, username, password, project_id, expires_at): 28 self.username = username 29 self.password = password 30 self.project_id = project_id 31 self.expires_at = expires_at 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class SftpCredentials.
31 def to_dict(self) -> dict[str, Any]: 32 username = self.username 33 34 password = self.password 35 36 project_id = self.project_id 37 38 expires_at = self.expires_at.isoformat() 39 40 field_dict: dict[str, Any] = {} 41 field_dict.update(self.additional_properties) 42 field_dict.update( 43 { 44 "username": username, 45 "password": password, 46 "projectId": project_id, 47 "expiresAt": expires_at, 48 } 49 ) 50 51 return field_dict
53 @classmethod 54 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 55 d = dict(src_dict) 56 username = d.pop("username") 57 58 password = d.pop("password") 59 60 project_id = d.pop("projectId") 61 62 expires_at = isoparse(d.pop("expiresAt")) 63 64 sftp_credentials = cls( 65 username=username, 66 password=password, 67 project_id=project_id, 68 expires_at=expires_at, 69 ) 70 71 sftp_credentials.additional_properties = d 72 return sftp_credentials
class SharingType(str, Enum):
    """Sharing-type values; any unrecognized value resolves to ``UNKNOWN``."""

    PRIVATE = "PRIVATE"
    READ_WRITE = "READ_WRITE"
    READ_WRITE_CONTROL = "READ_WRITE_CONTROL"
    # Fallback for values the client does not know; never send it in requests.
    UNKNOWN = "UNKNOWN"

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Enum lookup failed: map every unknown value onto the fallback member.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
class SortOrder(str, Enum):
    """Sort direction; any unrecognized value resolves to ``UNKNOWN``."""

    ASCENDING = "ASCENDING"
    DESCENDING = "DESCENDING"
    # Fallback for values the client does not know; never send it in requests.
    UNKNOWN = "UNKNOWN"

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Enum lookup failed: map every unknown value onto the fallback member.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
class Status(str, Enum):
    """Lifecycle status values; any unrecognized value resolves to ``UNKNOWN``."""

    ARCHIVED = "ARCHIVED"
    COMPLETED = "COMPLETED"
    DELETE = "DELETE"
    DELETED = "DELETED"
    DELETING = "DELETING"
    FAILED = "FAILED"
    PENDING = "PENDING"
    RUNNING = "RUNNING"
    STARTING = "STARTING"
    STOPPING = "STOPPING"
    SUSPENDED = "SUSPENDED"
    # Fallback for values the client does not know.
    UNKNOWN = "UNKNOWN"

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Enum lookup failed: map every unknown value onto the fallback member.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
@_attrs_define
class StopExecutionResponse:
    """
    Attributes:
        success (list[str] | Unset): List of job IDs that were successful in termination
        failed (list[str] | Unset): List of job IDs that were not successful in termination
    """

    success: list[str] | Unset = UNSET
    failed: list[str] | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict, omitting fields that were never set."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        if not isinstance(self.success, Unset):
            serialized["success"] = self.success
        if not isinstance(self.failed, Unset):
            serialized["failed"] = self.failed
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize; missing keys stay UNSET, extra keys become additional properties."""
        payload = dict(src_dict)
        response = cls(
            success=cast(list[str], payload.pop("success", UNSET)),
            failed=cast(list[str], payload.pop("failed", UNSET)),
        )
        response.additional_properties = payload
        return response

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) properties."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- success (list[str] | Unset): List of job IDs that were successful in termination
- failed (list[str] | Unset): List of job IDs that were not successful in termination
25def __init__(self, success=attr_dict['success'].default, failed=attr_dict['failed'].default): 26 self.success = success 27 self.failed = failed 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class StopExecutionResponse.
27 def to_dict(self) -> dict[str, Any]: 28 success: list[str] | Unset = UNSET 29 if not isinstance(self.success, Unset): 30 success = self.success 31 32 failed: list[str] | Unset = UNSET 33 if not isinstance(self.failed, Unset): 34 failed = self.failed 35 36 field_dict: dict[str, Any] = {} 37 field_dict.update(self.additional_properties) 38 field_dict.update({}) 39 if success is not UNSET: 40 field_dict["success"] = success 41 if failed is not UNSET: 42 field_dict["failed"] = failed 43 44 return field_dict
46 @classmethod 47 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 48 d = dict(src_dict) 49 success = cast(list[str], d.pop("success", UNSET)) 50 51 failed = cast(list[str], d.pop("failed", UNSET)) 52 53 stop_execution_response = cls( 54 success=success, 55 failed=failed, 56 ) 57 58 stop_execution_response.additional_properties = d 59 return stop_execution_response
class SyncStatus(str, Enum):
    """Synchronization outcome; any unrecognized value resolves to ``UNKNOWN``."""

    FAILED = "FAILED"
    SUCCESSFUL = "SUCCESSFUL"
    # Fallback for values the client does not know; never send it in requests.
    UNKNOWN = "UNKNOWN"

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Enum lookup failed: map every unknown value onto the fallback member.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class SystemInfoResponse:
    """
    Attributes:
        resources_bucket (str):
        references_bucket (str):
        live_endpoint (str):
        agent_endpoint (str):
        region (str):
        system_message (str):
        maintenance_mode_enabled (bool):
        commit_hash (str):
        version (str):
        resources_info (ResourcesInfo):
        tenant_info (TenantInfo):
        auth (AuthInfo):
    """

    resources_bucket: str
    references_bucket: str
    live_endpoint: str
    agent_endpoint: str
    region: str
    system_message: str
    maintenance_mode_enabled: bool
    commit_hash: str
    version: str
    resources_info: ResourcesInfo
    tenant_info: TenantInfo
    auth: AuthInfo
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict; nested models are recursively converted."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "resourcesBucket": self.resources_bucket,
                "referencesBucket": self.references_bucket,
                "liveEndpoint": self.live_endpoint,
                "agentEndpoint": self.agent_endpoint,
                "region": self.region,
                "systemMessage": self.system_message,
                "maintenanceModeEnabled": self.maintenance_mode_enabled,
                "commitHash": self.commit_hash,
                "version": self.version,
                "resourcesInfo": self.resources_info.to_dict(),
                "tenantInfo": self.tenant_info.to_dict(),
                "auth": self.auth.to_dict(),
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize, keeping unrecognized keys as additional properties."""
        from ..models.auth_info import AuthInfo
        from ..models.resources_info import ResourcesInfo
        from ..models.tenant_info import TenantInfo

        payload = dict(src_dict)
        response = cls(
            resources_bucket=payload.pop("resourcesBucket"),
            references_bucket=payload.pop("referencesBucket"),
            live_endpoint=payload.pop("liveEndpoint"),
            agent_endpoint=payload.pop("agentEndpoint"),
            region=payload.pop("region"),
            system_message=payload.pop("systemMessage"),
            maintenance_mode_enabled=payload.pop("maintenanceModeEnabled"),
            commit_hash=payload.pop("commitHash"),
            version=payload.pop("version"),
            resources_info=ResourcesInfo.from_dict(payload.pop("resourcesInfo")),
            tenant_info=TenantInfo.from_dict(payload.pop("tenantInfo")),
            auth=AuthInfo.from_dict(payload.pop("auth")),
        )
        response.additional_properties = payload
        return response

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) properties."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- resources_bucket (str):
- references_bucket (str):
- live_endpoint (str):
- agent_endpoint (str):
- region (str):
- system_message (str):
- maintenance_mode_enabled (bool):
- commit_hash (str):
- version (str):
- resources_info (ResourcesInfo):
- tenant_info (TenantInfo):
- auth (AuthInfo):
35def __init__(self, resources_bucket, references_bucket, live_endpoint, agent_endpoint, region, system_message, maintenance_mode_enabled, commit_hash, version, resources_info, tenant_info, auth): 36 self.resources_bucket = resources_bucket 37 self.references_bucket = references_bucket 38 self.live_endpoint = live_endpoint 39 self.agent_endpoint = agent_endpoint 40 self.region = region 41 self.system_message = system_message 42 self.maintenance_mode_enabled = maintenance_mode_enabled 43 self.commit_hash = commit_hash 44 self.version = version 45 self.resources_info = resources_info 46 self.tenant_info = tenant_info 47 self.auth = auth 48 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class SystemInfoResponse.
51 def to_dict(self) -> dict[str, Any]: 52 resources_bucket = self.resources_bucket 53 54 references_bucket = self.references_bucket 55 56 live_endpoint = self.live_endpoint 57 58 agent_endpoint = self.agent_endpoint 59 60 region = self.region 61 62 system_message = self.system_message 63 64 maintenance_mode_enabled = self.maintenance_mode_enabled 65 66 commit_hash = self.commit_hash 67 68 version = self.version 69 70 resources_info = self.resources_info.to_dict() 71 72 tenant_info = self.tenant_info.to_dict() 73 74 auth = self.auth.to_dict() 75 76 field_dict: dict[str, Any] = {} 77 field_dict.update(self.additional_properties) 78 field_dict.update( 79 { 80 "resourcesBucket": resources_bucket, 81 "referencesBucket": references_bucket, 82 "liveEndpoint": live_endpoint, 83 "agentEndpoint": agent_endpoint, 84 "region": region, 85 "systemMessage": system_message, 86 "maintenanceModeEnabled": maintenance_mode_enabled, 87 "commitHash": commit_hash, 88 "version": version, 89 "resourcesInfo": resources_info, 90 "tenantInfo": tenant_info, 91 "auth": auth, 92 } 93 ) 94 95 return field_dict
97 @classmethod 98 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 99 from ..models.auth_info import AuthInfo 100 from ..models.resources_info import ResourcesInfo 101 from ..models.tenant_info import TenantInfo 102 103 d = dict(src_dict) 104 resources_bucket = d.pop("resourcesBucket") 105 106 references_bucket = d.pop("referencesBucket") 107 108 live_endpoint = d.pop("liveEndpoint") 109 110 agent_endpoint = d.pop("agentEndpoint") 111 112 region = d.pop("region") 113 114 system_message = d.pop("systemMessage") 115 116 maintenance_mode_enabled = d.pop("maintenanceModeEnabled") 117 118 commit_hash = d.pop("commitHash") 119 120 version = d.pop("version") 121 122 resources_info = ResourcesInfo.from_dict(d.pop("resourcesInfo")) 123 124 tenant_info = TenantInfo.from_dict(d.pop("tenantInfo")) 125 126 auth = AuthInfo.from_dict(d.pop("auth")) 127 128 system_info_response = cls( 129 resources_bucket=resources_bucket, 130 references_bucket=references_bucket, 131 live_endpoint=live_endpoint, 132 agent_endpoint=agent_endpoint, 133 region=region, 134 system_message=system_message, 135 maintenance_mode_enabled=maintenance_mode_enabled, 136 commit_hash=commit_hash, 137 version=version, 138 resources_info=resources_info, 139 tenant_info=tenant_info, 140 auth=auth, 141 ) 142 143 system_info_response.additional_properties = d 144 return system_info_response
@_attrs_define
class Table:
    """
    Attributes:
        desc (str):
        name (str | Unset): User-friendly name of asset
        type_ (str | Unset): Type of file Example: parquet.
        rows (int | Unset): Number of rows in table
        path (str | Unset): Relative path to asset
        cols (list[ColumnDefinition] | None | Unset):
    """

    desc: str
    name: str | Unset = UNSET
    type_: str | Unset = UNSET
    rows: int | Unset = UNSET
    path: str | Unset = UNSET
    cols: list[ColumnDefinition] | None | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict, omitting fields that were never set."""
        cols: list[dict[str, Any]] | None | Unset
        if isinstance(self.cols, Unset):
            cols = UNSET
        elif isinstance(self.cols, list):
            cols = [col.to_dict() for col in self.cols]
        else:
            cols = self.cols  # None passes through unchanged

        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized["desc"] = self.desc
        for key, value in (
            ("name", self.name),
            ("type", self.type_),
            ("rows", self.rows),
            ("path", self.path),
            ("cols", cols),
        ):
            if value is not UNSET:
                serialized[key] = value
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize, keeping unrecognized keys as additional properties."""
        from ..models.column_definition import ColumnDefinition

        payload = dict(src_dict)

        def _parse_cols(data: object) -> list[ColumnDefinition] | None | Unset:
            # None and UNSET pass through untouched.
            if data is None or isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                return [ColumnDefinition.from_dict(item) for item in data]
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            # Fall back to returning the raw value when parsing fails.
            return cast(list[ColumnDefinition] | None | Unset, data)

        table = cls(
            desc=payload.pop("desc"),
            name=payload.pop("name", UNSET),
            type_=payload.pop("type", UNSET),
            rows=payload.pop("rows", UNSET),
            path=payload.pop("path", UNSET),
            cols=_parse_cols(payload.pop("cols", UNSET)),
        )
        table.additional_properties = payload
        return table

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) properties."""
        return [*self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- desc (str):
- name (str | Unset): User-friendly name of asset
- type_ (str | Unset): Type of file Example: parquet.
- rows (int | Unset): Number of rows in table
- path (str | Unset): Relative path to asset
- cols (list[ColumnDefinition] | None | Unset):
29def __init__(self, desc, name=attr_dict['name'].default, type_=attr_dict['type_'].default, rows=attr_dict['rows'].default, path=attr_dict['path'].default, cols=attr_dict['cols'].default): 30 self.desc = desc 31 self.name = name 32 self.type_ = type_ 33 self.rows = rows 34 self.path = path 35 self.cols = cols 36 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Table.
39 def to_dict(self) -> dict[str, Any]: 40 desc = self.desc 41 42 name = self.name 43 44 type_ = self.type_ 45 46 rows = self.rows 47 48 path = self.path 49 50 cols: list[dict[str, Any]] | None | Unset 51 if isinstance(self.cols, Unset): 52 cols = UNSET 53 elif isinstance(self.cols, list): 54 cols = [] 55 for cols_type_0_item_data in self.cols: 56 cols_type_0_item = cols_type_0_item_data.to_dict() 57 cols.append(cols_type_0_item) 58 59 else: 60 cols = self.cols 61 62 field_dict: dict[str, Any] = {} 63 field_dict.update(self.additional_properties) 64 field_dict.update( 65 { 66 "desc": desc, 67 } 68 ) 69 if name is not UNSET: 70 field_dict["name"] = name 71 if type_ is not UNSET: 72 field_dict["type"] = type_ 73 if rows is not UNSET: 74 field_dict["rows"] = rows 75 if path is not UNSET: 76 field_dict["path"] = path 77 if cols is not UNSET: 78 field_dict["cols"] = cols 79 80 return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a Table from a decoded-JSON mapping.

    Keys not matching a declared field are preserved in
    ``additional_properties``.
    """
    # Local import avoids a circular import between model modules.
    from ..models.column_definition import ColumnDefinition

    d = dict(src_dict)
    desc = d.pop("desc")

    name = d.pop("name", UNSET)

    type_ = d.pop("type", UNSET)

    rows = d.pop("rows", UNSET)

    path = d.pop("path", UNSET)

    def _parse_cols(data: object) -> list[ColumnDefinition] | None | Unset:
        # None and UNSET pass through untouched; a list is parsed
        # item-by-item, and any parse failure falls back to returning the
        # raw value unchanged.
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, list):
                raise TypeError()
            cols_type_0 = []
            _cols_type_0 = data
            for cols_type_0_item_data in _cols_type_0:
                cols_type_0_item = ColumnDefinition.from_dict(cols_type_0_item_data)

                cols_type_0.append(cols_type_0_item)

            return cols_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(list[ColumnDefinition] | None | Unset, data)

    cols = _parse_cols(d.pop("cols", UNSET))

    table = cls(
        desc=desc,
        name=name,
        type_=type_,
        rows=rows,
        path=path,
        cols=cols,
    )

    # Whatever remains in d was not a declared field.
    table.additional_properties = d
    return table
@_attrs_define
class Tag:
    """A key/value tag that can be attached to a resource.

    Attributes:
        value (str): The value of the tag
        editable (bool | Unset): Whether the tag value is editable Default: True.
        key (None | str | Unset):
    """

    value: str
    editable: bool | Unset = True
    key: None | str | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; UNSET optional fields are omitted."""
        serialized: dict[str, Any] = {**self.additional_properties, "value": self.value}
        if self.editable is not UNSET:
            serialized["editable"] = self.editable
        key = UNSET if isinstance(self.key, Unset) else self.key
        if key is not UNSET:
            serialized["key"] = key
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; leftover keys land in additional_properties."""
        payload = dict(src_dict)

        def _read_key(raw: object) -> None | str | Unset:
            # None and the UNSET sentinel pass through untouched.
            if raw is None or isinstance(raw, Unset):
                return raw
            return cast(None | str | Unset, raw)

        tag = cls(
            value=payload.pop("value"),
            editable=payload.pop("editable", UNSET),
            key=_read_key(payload.pop("key", UNSET)),
        )
        tag.additional_properties = payload
        return tag

    @property
    def additional_keys(self) -> list[str]:
        """Names of undeclared properties captured during deserialization."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- value (str): The value of the tag
- editable (bool | Unset): Whether the tag value is editable Default: True.
- key (None | str | Unset):
def __init__(self, value, editable=attr_dict['editable'].default, key=attr_dict['key'].default):
    """attrs-generated initializer for ``Tag``.

    ``value`` is required; ``editable`` and ``key`` fall back to the
    attrs-declared defaults.
    """
    self.value = value
    self.editable = editable
    self.key = key
    # Fresh dict per instance for payload keys not declared as attributes.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Tag.
def to_dict(self) -> dict[str, Any]:
    """Serialize this Tag to a JSON-compatible dict.

    Optional fields equal to the UNSET sentinel are omitted;
    ``additional_properties`` is written first so declared fields win.
    """
    value = self.value

    editable = self.editable

    key: None | str | Unset
    if isinstance(self.key, Unset):
        key = UNSET
    else:
        # str or explicit None passes through unchanged.
        key = self.key

    field_dict: dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "value": value,
        }
    )
    if editable is not UNSET:
        field_dict["editable"] = editable
    if key is not UNSET:
        field_dict["key"] = key

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a Tag from a decoded-JSON mapping.

    Keys not matching a declared field are preserved in
    ``additional_properties``.
    """
    d = dict(src_dict)
    value = d.pop("value")

    editable = d.pop("editable", UNSET)

    def _parse_key(data: object) -> None | str | Unset:
        # None and UNSET pass through untouched; anything else is treated
        # as the declared str type.
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(None | str | Unset, data)

    key = _parse_key(d.pop("key", UNSET))

    tag = cls(
        value=value,
        editable=editable,
        key=key,
    )

    tag.additional_properties = d
    return tag
@_attrs_define
class Task:
    """A single execution task within a workflow run.

    Attributes:
        name (str):
        status (str):
        native_job_id (None | str | Unset): Job ID on the underlying execution environment (i.e. AWS Batch ID)
        requested_at (datetime.datetime | None | Unset):
        started_at (datetime.datetime | None | Unset):
        stopped_at (datetime.datetime | None | Unset):
        container_image (None | str | Unset):
        command_line (None | str | Unset):
        log_location (None | str | Unset):
    """

    name: str
    status: str
    native_job_id: None | str | Unset = UNSET
    requested_at: datetime.datetime | None | Unset = UNSET
    started_at: datetime.datetime | None | Unset = UNSET
    stopped_at: datetime.datetime | None | Unset = UNSET
    container_image: None | str | Unset = UNSET
    command_line: None | str | Unset = UNSET
    log_location: None | str | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; UNSET optional fields are omitted."""

        def _plain(value: Any) -> Any:
            # Normalize any Unset instance to the UNSET singleton so the
            # caller can filter by identity; None/str pass through.
            return UNSET if isinstance(value, Unset) else value

        def _timestamp(value: Any) -> Any:
            # Datetimes become ISO-8601 strings; None passes through.
            if isinstance(value, Unset):
                return UNSET
            if isinstance(value, datetime.datetime):
                return value.isoformat()
            return value

        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "name": self.name,
                "status": self.status,
            }
        )
        optional = {
            "nativeJobId": _plain(self.native_job_id),
            "requestedAt": _timestamp(self.requested_at),
            "startedAt": _timestamp(self.started_at),
            "stoppedAt": _timestamp(self.stopped_at),
            "containerImage": _plain(self.container_image),
            "commandLine": _plain(self.command_line),
            "logLocation": _plain(self.log_location),
        }
        for wire_key, value in optional.items():
            if value is not UNSET:
                serialized[wire_key] = value

        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; leftover keys land in additional_properties."""
        payload = dict(src_dict)

        def _opt_str(raw: object) -> None | str | Unset:
            # None and UNSET pass through untouched.
            if raw is None or isinstance(raw, Unset):
                return raw
            return cast(None | str | Unset, raw)

        def _opt_dt(raw: object) -> datetime.datetime | None | Unset:
            # ISO-8601 strings are parsed; anything unparsable is returned
            # unchanged rather than raising.
            if raw is None or isinstance(raw, Unset):
                return raw
            try:
                if not isinstance(raw, str):
                    raise TypeError()
                return isoparse(raw)
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(datetime.datetime | None | Unset, raw)

        task = cls(
            name=payload.pop("name"),
            status=payload.pop("status"),
            native_job_id=_opt_str(payload.pop("nativeJobId", UNSET)),
            requested_at=_opt_dt(payload.pop("requestedAt", UNSET)),
            started_at=_opt_dt(payload.pop("startedAt", UNSET)),
            stopped_at=_opt_dt(payload.pop("stoppedAt", UNSET)),
            container_image=_opt_str(payload.pop("containerImage", UNSET)),
            command_line=_opt_str(payload.pop("commandLine", UNSET)),
            log_location=_opt_str(payload.pop("logLocation", UNSET)),
        )

        task.additional_properties = payload
        return task

    @property
    def additional_keys(self) -> list[str]:
        """Names of undeclared properties captured during deserialization."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- status (str):
- native_job_id (None | str | Unset): Job ID on the underlying execution environment (i.e. AWS Batch ID)
- requested_at (datetime.datetime | None | Unset):
- started_at (datetime.datetime | None | Unset):
- stopped_at (datetime.datetime | None | Unset):
- container_image (None | str | Unset):
- command_line (None | str | Unset):
- log_location (None | str | Unset):
def __init__(self, name, status, native_job_id=attr_dict['native_job_id'].default, requested_at=attr_dict['requested_at'].default, started_at=attr_dict['started_at'].default, stopped_at=attr_dict['stopped_at'].default, container_image=attr_dict['container_image'].default, command_line=attr_dict['command_line'].default, log_location=attr_dict['log_location'].default):
    """attrs-generated initializer for ``Task``.

    ``name`` and ``status`` are required; every other field falls back to
    the attrs-declared defaults.
    """
    self.name = name
    self.status = status
    self.native_job_id = native_job_id
    self.requested_at = requested_at
    self.started_at = started_at
    self.stopped_at = stopped_at
    self.container_image = container_image
    self.command_line = command_line
    self.log_location = log_location
    # Fresh dict per instance for payload keys not declared as attributes.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Task.
def to_dict(self) -> dict[str, Any]:
    """Serialize this Task to a JSON-compatible dict.

    Datetime fields are rendered as ISO-8601 strings; optional fields equal
    to the UNSET sentinel are omitted. ``additional_properties`` is written
    first so declared fields win on key collisions.
    """
    name = self.name

    status = self.status

    native_job_id: None | str | Unset
    if isinstance(self.native_job_id, Unset):
        native_job_id = UNSET
    else:
        native_job_id = self.native_job_id

    requested_at: None | str | Unset
    if isinstance(self.requested_at, Unset):
        requested_at = UNSET
    elif isinstance(self.requested_at, datetime.datetime):
        requested_at = self.requested_at.isoformat()
    else:
        # Explicit None passes through unchanged.
        requested_at = self.requested_at

    started_at: None | str | Unset
    if isinstance(self.started_at, Unset):
        started_at = UNSET
    elif isinstance(self.started_at, datetime.datetime):
        started_at = self.started_at.isoformat()
    else:
        started_at = self.started_at

    stopped_at: None | str | Unset
    if isinstance(self.stopped_at, Unset):
        stopped_at = UNSET
    elif isinstance(self.stopped_at, datetime.datetime):
        stopped_at = self.stopped_at.isoformat()
    else:
        stopped_at = self.stopped_at

    container_image: None | str | Unset
    if isinstance(self.container_image, Unset):
        container_image = UNSET
    else:
        container_image = self.container_image

    command_line: None | str | Unset
    if isinstance(self.command_line, Unset):
        command_line = UNSET
    else:
        command_line = self.command_line

    log_location: None | str | Unset
    if isinstance(self.log_location, Unset):
        log_location = UNSET
    else:
        log_location = self.log_location

    field_dict: dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "name": name,
            "status": status,
        }
    )
    # Optional fields use camelCase wire keys.
    if native_job_id is not UNSET:
        field_dict["nativeJobId"] = native_job_id
    if requested_at is not UNSET:
        field_dict["requestedAt"] = requested_at
    if started_at is not UNSET:
        field_dict["startedAt"] = started_at
    if stopped_at is not UNSET:
        field_dict["stoppedAt"] = stopped_at
    if container_image is not UNSET:
        field_dict["containerImage"] = container_image
    if command_line is not UNSET:
        field_dict["commandLine"] = command_line
    if log_location is not UNSET:
        field_dict["logLocation"] = log_location

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a Task from a decoded-JSON mapping.

    ISO-8601 timestamp strings are parsed to datetimes; unparsable values
    are kept as-is rather than raising. Keys not matching a declared field
    are preserved in ``additional_properties``.
    """
    d = dict(src_dict)
    name = d.pop("name")

    status = d.pop("status")

    def _parse_native_job_id(data: object) -> None | str | Unset:
        # None and UNSET pass through untouched.
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(None | str | Unset, data)

    native_job_id = _parse_native_job_id(d.pop("nativeJobId", UNSET))

    def _parse_requested_at(data: object) -> datetime.datetime | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            requested_at_type_0 = isoparse(data)

            return requested_at_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        # Fallback: return the raw value unchanged.
        return cast(datetime.datetime | None | Unset, data)

    requested_at = _parse_requested_at(d.pop("requestedAt", UNSET))

    def _parse_started_at(data: object) -> datetime.datetime | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            started_at_type_0 = isoparse(data)

            return started_at_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(datetime.datetime | None | Unset, data)

    started_at = _parse_started_at(d.pop("startedAt", UNSET))

    def _parse_stopped_at(data: object) -> datetime.datetime | None | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            stopped_at_type_0 = isoparse(data)

            return stopped_at_type_0
        except (TypeError, ValueError, AttributeError, KeyError):
            pass
        return cast(datetime.datetime | None | Unset, data)

    stopped_at = _parse_stopped_at(d.pop("stoppedAt", UNSET))

    def _parse_container_image(data: object) -> None | str | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(None | str | Unset, data)

    container_image = _parse_container_image(d.pop("containerImage", UNSET))

    def _parse_command_line(data: object) -> None | str | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(None | str | Unset, data)

    command_line = _parse_command_line(d.pop("commandLine", UNSET))

    def _parse_log_location(data: object) -> None | str | Unset:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(None | str | Unset, data)

    log_location = _parse_log_location(d.pop("logLocation", UNSET))

    task = cls(
        name=name,
        status=status,
        native_job_id=native_job_id,
        requested_at=requested_at,
        started_at=started_at,
        stopped_at=stopped_at,
        container_image=container_image,
        command_line=command_line,
        log_location=log_location,
    )

    # Whatever remains in d was not a declared field.
    task.additional_properties = d
    return task
@_attrs_define
class TaskCost:
    """Cost record for a single executed task.

    Attributes:
        name (str):
        task_id (str):
        status (str):
        cost (float):
    """

    name: str
    task_id: str
    status: str
    cost: float
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict (all four fields are required)."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "name": self.name,
                "taskId": self.task_id,
                "status": self.status,
                "cost": self.cost,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; leftover keys land in additional_properties."""
        payload = dict(src_dict)
        task_cost = cls(
            name=payload.pop("name"),
            task_id=payload.pop("taskId"),
            status=payload.pop("status"),
            cost=payload.pop("cost"),
        )
        task_cost.additional_properties = payload
        return task_cost

    @property
    def additional_keys(self) -> list[str]:
        """Names of undeclared properties captured during deserialization."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- task_id (str):
- status (str):
- cost (float):
def __init__(self, name, task_id, status, cost):
    """attrs-generated initializer for ``TaskCost``; all fields are required."""
    self.name = name
    self.task_id = task_id
    self.status = status
    self.cost = cost
    # Fresh dict per instance for payload keys not declared as attributes.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class TaskCost.
def to_dict(self) -> dict[str, Any]:
    """Serialize this TaskCost to a JSON-compatible dict.

    ``additional_properties`` is written first so declared fields win on
    key collisions; ``task_id`` maps to the camelCase wire key ``taskId``.
    """
    name = self.name

    task_id = self.task_id

    status = self.status

    cost = self.cost

    field_dict: dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "name": name,
            "taskId": task_id,
            "status": status,
            "cost": cost,
        }
    )

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a TaskCost from a decoded-JSON mapping.

    All four fields are required (KeyError if missing); keys not matching
    a declared field are preserved in ``additional_properties``.
    """
    d = dict(src_dict)
    name = d.pop("name")

    task_id = d.pop("taskId")

    status = d.pop("status")

    cost = d.pop("cost")

    task_cost = cls(
        name=name,
        task_id=task_id,
        status=status,
        cost=cost,
    )

    task_cost.additional_properties = d
    return task_cost
@_attrs_define
class TenantInfo:
    """Tenant branding, contact, and login configuration.

    Attributes:
        id (str):
        name (str):
        description (str):
        location (str):
        contact_email (str):
        tenant_logo_url (str):
        terms_of_service_url (str):
        privacy_policy_url (str):
        login_providers (list[LoginProvider]):
        features (FeatureFlags):
    """

    id: str
    name: str
    description: str
    location: str
    contact_email: str
    tenant_logo_url: str
    terms_of_service_url: str
    privacy_policy_url: str
    login_providers: list[LoginProvider]
    features: FeatureFlags
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict (all fields are required)."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "location": self.location,
                "contactEmail": self.contact_email,
                "tenantLogoUrl": self.tenant_logo_url,
                "termsOfServiceUrl": self.terms_of_service_url,
                "privacyPolicyUrl": self.privacy_policy_url,
                # Nested models serialize recursively.
                "loginProviders": [provider.to_dict() for provider in self.login_providers],
                "features": self.features.to_dict(),
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; leftover keys land in additional_properties."""
        # Local imports avoid circular imports between model modules.
        from ..models.feature_flags import FeatureFlags
        from ..models.login_provider import LoginProvider

        payload = dict(src_dict)
        tenant_info = cls(
            id=payload.pop("id"),
            name=payload.pop("name"),
            description=payload.pop("description"),
            location=payload.pop("location"),
            contact_email=payload.pop("contactEmail"),
            tenant_logo_url=payload.pop("tenantLogoUrl"),
            terms_of_service_url=payload.pop("termsOfServiceUrl"),
            privacy_policy_url=payload.pop("privacyPolicyUrl"),
            login_providers=[
                LoginProvider.from_dict(item) for item in payload.pop("loginProviders")
            ],
            features=FeatureFlags.from_dict(payload.pop("features")),
        )
        tenant_info.additional_properties = payload
        return tenant_info

    @property
    def additional_keys(self) -> list[str]:
        """Names of undeclared properties captured during deserialization."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- description (str):
- location (str):
- contact_email (str):
- tenant_logo_url (str):
- terms_of_service_url (str):
- privacy_policy_url (str):
- login_providers (list[LoginProvider]):
- features (FeatureFlags):
def __init__(self, id, name, description, location, contact_email, tenant_logo_url, terms_of_service_url, privacy_policy_url, login_providers, features):
    """attrs-generated initializer for ``TenantInfo``; all fields are required."""
    self.id = id
    self.name = name
    self.description = description
    self.location = location
    self.contact_email = contact_email
    self.tenant_logo_url = tenant_logo_url
    self.terms_of_service_url = terms_of_service_url
    self.privacy_policy_url = privacy_policy_url
    self.login_providers = login_providers
    self.features = features
    # Fresh dict per instance for payload keys not declared as attributes.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class TenantInfo.
def to_dict(self) -> dict[str, Any]:
    """Serialize this TenantInfo to a JSON-compatible dict.

    Nested ``login_providers`` and ``features`` models are serialized
    recursively; ``additional_properties`` is written first so declared
    fields win on key collisions.
    """
    id = self.id

    name = self.name

    description = self.description

    location = self.location

    contact_email = self.contact_email

    tenant_logo_url = self.tenant_logo_url

    terms_of_service_url = self.terms_of_service_url

    privacy_policy_url = self.privacy_policy_url

    login_providers = []
    for login_providers_item_data in self.login_providers:
        login_providers_item = login_providers_item_data.to_dict()
        login_providers.append(login_providers_item)

    features = self.features.to_dict()

    field_dict: dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "id": id,
            "name": name,
            "description": description,
            "location": location,
            "contactEmail": contact_email,
            "tenantLogoUrl": tenant_logo_url,
            "termsOfServiceUrl": terms_of_service_url,
            "privacyPolicyUrl": privacy_policy_url,
            "loginProviders": login_providers,
            "features": features,
        }
    )

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build a TenantInfo from a decoded-JSON mapping.

    All fields are required (KeyError if missing); keys not matching a
    declared field are preserved in ``additional_properties``.
    """
    # Local imports avoid circular imports between model modules.
    from ..models.feature_flags import FeatureFlags
    from ..models.login_provider import LoginProvider

    d = dict(src_dict)
    id = d.pop("id")

    name = d.pop("name")

    description = d.pop("description")

    location = d.pop("location")

    contact_email = d.pop("contactEmail")

    tenant_logo_url = d.pop("tenantLogoUrl")

    terms_of_service_url = d.pop("termsOfServiceUrl")

    privacy_policy_url = d.pop("privacyPolicyUrl")

    login_providers = []
    _login_providers = d.pop("loginProviders")
    for login_providers_item_data in _login_providers:
        login_providers_item = LoginProvider.from_dict(login_providers_item_data)

        login_providers.append(login_providers_item)

    features = FeatureFlags.from_dict(d.pop("features"))

    tenant_info = cls(
        id=id,
        name=name,
        description=description,
        location=location,
        contact_email=contact_email,
        tenant_logo_url=tenant_logo_url,
        terms_of_service_url=terms_of_service_url,
        privacy_policy_url=privacy_policy_url,
        login_providers=login_providers,
        features=features,
    )

    tenant_info.additional_properties = d
    return tenant_info
@_attrs_define
class UpdateDatasetRequest:
    """Request payload for updating a dataset's metadata.

    Attributes:
        name (str):
        description (str):
        process_id (str):
        tags (list[Tag]):
    """

    name: str
    description: str
    process_id: str
    tags: list[Tag]
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict (all fields are required)."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "name": self.name,
                "description": self.description,
                "processId": self.process_id,
                # Nested Tag models serialize recursively.
                "tags": [tag.to_dict() for tag in self.tags],
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; leftover keys land in additional_properties."""
        # Local import avoids a circular import between model modules.
        from ..models.tag import Tag

        payload = dict(src_dict)
        update_dataset_request = cls(
            name=payload.pop("name"),
            description=payload.pop("description"),
            process_id=payload.pop("processId"),
            tags=[Tag.from_dict(item) for item in payload.pop("tags")],
        )
        update_dataset_request.additional_properties = payload
        return update_dataset_request

    @property
    def additional_keys(self) -> list[str]:
        """Names of undeclared properties captured during deserialization."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- description (str):
- process_id (str):
- tags (list[Tag]):
def __init__(self, name, description, process_id, tags):
    """attrs-generated initializer for ``UpdateDatasetRequest``; all fields required."""
    self.name = name
    self.description = description
    self.process_id = process_id
    self.tags = tags
    # Fresh dict per instance for payload keys not declared as attributes.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class UpdateDatasetRequest.
def to_dict(self) -> dict[str, Any]:
    """Serialize this UpdateDatasetRequest to a JSON-compatible dict.

    Nested Tag models serialize recursively; ``additional_properties`` is
    written first so declared fields win on key collisions.
    """
    name = self.name

    description = self.description

    process_id = self.process_id

    tags = []
    for tags_item_data in self.tags:
        tags_item = tags_item_data.to_dict()
        tags.append(tags_item)

    field_dict: dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "name": name,
            "description": description,
            "processId": process_id,
            "tags": tags,
        }
    )

    return field_dict
@classmethod
def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
    """Build an UpdateDatasetRequest from a decoded-JSON mapping.

    All fields are required (KeyError if missing); keys not matching a
    declared field are preserved in ``additional_properties``.
    """
    # Local import avoids a circular import between model modules.
    from ..models.tag import Tag

    d = dict(src_dict)
    name = d.pop("name")

    description = d.pop("description")

    process_id = d.pop("processId")

    tags = []
    _tags = d.pop("tags")
    for tags_item_data in _tags:
        tags_item = Tag.from_dict(tags_item_data)

        tags.append(tags_item)

    update_dataset_request = cls(
        name=name,
        description=description,
        process_id=process_id,
        tags=tags,
    )

    update_dataset_request.additional_properties = d
    return update_dataset_request
@_attrs_define
class UpdateUserRequest:
    """Request payload for updating a user's profile.

    Attributes:
        name (str): Display name of the user
        email (str): Email address of the user
        phone (str | Unset): Phone number of the user
        department (str | Unset): Department or lab the user belongs to
        job_title (str | Unset): Job title or role of the user
        organization (str | Unset): The organization the user belongs to, only editable by administrators
        settings (None | Unset | UserSettings):
        groups (list[str] | Unset): Groups the user belongs to, only editable by administrators
    """

    name: str
    email: str
    phone: str | Unset = UNSET
    department: str | Unset = UNSET
    job_title: str | Unset = UNSET
    organization: str | Unset = UNSET
    settings: None | Unset | UserSettings = UNSET
    groups: list[str] | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; UNSET optional fields are omitted."""
        # Local import avoids a circular import between model modules.
        from ..models.user_settings import UserSettings

        if isinstance(self.settings, Unset):
            settings: dict[str, Any] | None | Unset = UNSET
        elif isinstance(self.settings, UserSettings):
            settings = self.settings.to_dict()
        else:
            # Explicit None passes through unchanged.
            settings = self.settings

        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "name": self.name,
                "email": self.email,
            }
        )
        optional = {
            "phone": self.phone,
            "department": self.department,
            "jobTitle": self.job_title,
            "organization": self.organization,
            "settings": settings,
            "groups": UNSET if isinstance(self.groups, Unset) else self.groups,
        }
        for wire_key, value in optional.items():
            if value is not UNSET:
                serialized[wire_key] = value

        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; leftover keys land in additional_properties."""
        from ..models.user_settings import UserSettings

        payload = dict(src_dict)

        def _read_settings(raw: object) -> None | Unset | UserSettings:
            # None and UNSET pass through; dicts are parsed as UserSettings,
            # with a fallback to the raw value if parsing fails.
            if raw is None or isinstance(raw, Unset):
                return raw
            try:
                if not isinstance(raw, dict):
                    raise TypeError()
                return UserSettings.from_dict(raw)
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(None | Unset | UserSettings, raw)

        update_user_request = cls(
            name=payload.pop("name"),
            email=payload.pop("email"),
            phone=payload.pop("phone", UNSET),
            department=payload.pop("department", UNSET),
            job_title=payload.pop("jobTitle", UNSET),
            organization=payload.pop("organization", UNSET),
            settings=_read_settings(payload.pop("settings", UNSET)),
            groups=cast(list[str], payload.pop("groups", UNSET)),
        )

        update_user_request.additional_properties = payload
        return update_user_request

    @property
    def additional_keys(self) -> list[str]:
        """Names of undeclared properties captured during deserialization."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str): Display name of the user
- email (str): Email address of the user
- phone (str | Unset): Phone number of the user
- department (str | Unset): Department or lab the user belongs to
- job_title (str | Unset): Job title or role of the user
- organization (str | Unset): The organization the user belongs to, only editable by administrators
- settings (None | Unset | UserSettings):
- groups (list[str] | Unset): Groups the user belongs to, only editable by administrators
31def __init__(self, name, email, phone=attr_dict['phone'].default, department=attr_dict['department'].default, job_title=attr_dict['job_title'].default, organization=attr_dict['organization'].default, settings=attr_dict['settings'].default, groups=attr_dict['groups'].default): 32 self.name = name 33 self.email = email 34 self.phone = phone 35 self.department = department 36 self.job_title = job_title 37 self.organization = organization 38 self.settings = settings 39 self.groups = groups 40 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class UpdateUserRequest.
43 def to_dict(self) -> dict[str, Any]: 44 from ..models.user_settings import UserSettings 45 46 name = self.name 47 48 email = self.email 49 50 phone = self.phone 51 52 department = self.department 53 54 job_title = self.job_title 55 56 organization = self.organization 57 58 settings: dict[str, Any] | None | Unset 59 if isinstance(self.settings, Unset): 60 settings = UNSET 61 elif isinstance(self.settings, UserSettings): 62 settings = self.settings.to_dict() 63 else: 64 settings = self.settings 65 66 groups: list[str] | Unset = UNSET 67 if not isinstance(self.groups, Unset): 68 groups = self.groups 69 70 field_dict: dict[str, Any] = {} 71 field_dict.update(self.additional_properties) 72 field_dict.update( 73 { 74 "name": name, 75 "email": email, 76 } 77 ) 78 if phone is not UNSET: 79 field_dict["phone"] = phone 80 if department is not UNSET: 81 field_dict["department"] = department 82 if job_title is not UNSET: 83 field_dict["jobTitle"] = job_title 84 if organization is not UNSET: 85 field_dict["organization"] = organization 86 if settings is not UNSET: 87 field_dict["settings"] = settings 88 if groups is not UNSET: 89 field_dict["groups"] = groups 90 91 return field_dict
93 @classmethod 94 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 95 from ..models.user_settings import UserSettings 96 97 d = dict(src_dict) 98 name = d.pop("name") 99 100 email = d.pop("email") 101 102 phone = d.pop("phone", UNSET) 103 104 department = d.pop("department", UNSET) 105 106 job_title = d.pop("jobTitle", UNSET) 107 108 organization = d.pop("organization", UNSET) 109 110 def _parse_settings(data: object) -> None | Unset | UserSettings: 111 if data is None: 112 return data 113 if isinstance(data, Unset): 114 return data 115 try: 116 if not isinstance(data, dict): 117 raise TypeError() 118 settings_type_1 = UserSettings.from_dict(data) 119 120 return settings_type_1 121 except (TypeError, ValueError, AttributeError, KeyError): 122 pass 123 return cast(None | Unset | UserSettings, data) 124 125 settings = _parse_settings(d.pop("settings", UNSET)) 126 127 groups = cast(list[str], d.pop("groups", UNSET)) 128 129 update_user_request = cls( 130 name=name, 131 email=email, 132 phone=phone, 133 department=department, 134 job_title=job_title, 135 organization=organization, 136 settings=settings, 137 groups=groups, 138 ) 139 140 update_user_request.additional_properties = d 141 return update_user_request
@_attrs_define
class UploadDatasetCreateResponse:
    """Response returned after a dataset upload is registered.

    Attributes:
        id (str):
        message (str):
        upload_path (str):
        bucket (str):
    """

    id: str
    message: str
    upload_path: str
    bucket: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a camelCase dict; extra properties are merged in first
        so the declared fields take precedence on key collisions."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "id": self.id,
                "message": self.message,
                "uploadPath": self.upload_path,
                "bucket": self.bucket,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a wire-format mapping; unconsumed keys are
        retained in ``additional_properties``."""
        payload = dict(src_dict)
        response = cls(
            id=payload.pop("id"),
            message=payload.pop("message"),
            upload_path=payload.pop("uploadPath"),
            bucket=payload.pop("bucket"),
        )
        response.additional_properties = payload
        return response

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) properties."""
        return [key for key in self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- message (str):
- upload_path (str):
- bucket (str):
27def __init__(self, id, message, upload_path, bucket): 28 self.id = id 29 self.message = message 30 self.upload_path = upload_path 31 self.bucket = bucket 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class UploadDatasetCreateResponse.
29 def to_dict(self) -> dict[str, Any]: 30 id = self.id 31 32 message = self.message 33 34 upload_path = self.upload_path 35 36 bucket = self.bucket 37 38 field_dict: dict[str, Any] = {} 39 field_dict.update(self.additional_properties) 40 field_dict.update( 41 { 42 "id": id, 43 "message": message, 44 "uploadPath": upload_path, 45 "bucket": bucket, 46 } 47 ) 48 49 return field_dict
51 @classmethod 52 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 53 d = dict(src_dict) 54 id = d.pop("id") 55 56 message = d.pop("message") 57 58 upload_path = d.pop("uploadPath") 59 60 bucket = d.pop("bucket") 61 62 upload_dataset_create_response = cls( 63 id=id, 64 message=message, 65 upload_path=upload_path, 66 bucket=bucket, 67 ) 68 69 upload_dataset_create_response.additional_properties = d 70 return upload_dataset_create_response
@_attrs_define
class UploadDatasetRequest:
    """Request body for registering a new dataset upload.

    Attributes:
        name (str): Name of the dataset
        process_id (str): ID of the ingest process Example: paired_dnaseq.
        expected_files (list[str]):
        description (str | Unset): Description of the dataset
        tags (list[Tag] | None | Unset): List of tags to apply to the dataset
    """

    name: str
    process_id: str
    expected_files: list[str]
    description: str | Unset = UNSET
    tags: list[Tag] | None | Unset = UNSET
    # Holds any wire-payload keys that are not declared fields above.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict.

        snake_case fields map to camelCase wire names (``process_id`` ->
        ``processId``); UNSET optionals are omitted, and extra properties are
        merged in first so declared fields win on collisions.
        """
        name = self.name

        process_id = self.process_id

        expected_files = self.expected_files

        description = self.description

        # tags is a union: UNSET passes through, a list of Tag models is
        # serialized element-by-element, and None is emitted as-is.
        tags: list[dict[str, Any]] | None | Unset
        if isinstance(self.tags, Unset):
            tags = UNSET
        elif isinstance(self.tags, list):
            tags = []
            for tags_type_0_item_data in self.tags:
                tags_type_0_item = tags_type_0_item_data.to_dict()
                tags.append(tags_type_0_item)

        else:
            tags = self.tags

        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "name": name,
                "processId": process_id,
                "expectedFiles": expected_files,
            }
        )
        # Optional fields are only written when explicitly set (not UNSET).
        if description is not UNSET:
            field_dict["description"] = description
        if tags is not UNSET:
            field_dict["tags"] = tags

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a wire-format mapping.

        Raises KeyError if ``name``, ``processId`` or ``expectedFiles`` is
        missing. Unconsumed keys are kept in ``additional_properties``.
        """
        # Local import: keeps module import time free of a circular dependency.
        from ..models.tag import Tag

        d = dict(src_dict)
        name = d.pop("name")

        process_id = d.pop("processId")

        expected_files = cast(list[str], d.pop("expectedFiles"))

        description = d.pop("description", UNSET)

        def _parse_tags(data: object) -> list[Tag] | None | Unset:
            # None and UNSET pass straight through; a list is attempted as a
            # list of nested Tag models; anything unparseable is returned
            # unchanged (the broad except deliberately falls through).
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                tags_type_0 = []
                _tags_type_0 = data
                for tags_type_0_item_data in _tags_type_0:
                    tags_type_0_item = Tag.from_dict(tags_type_0_item_data)

                    tags_type_0.append(tags_type_0_item)

                return tags_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(list[Tag] | None | Unset, data)

        tags = _parse_tags(d.pop("tags", UNSET))

        upload_dataset_request = cls(
            name=name,
            process_id=process_id,
            expected_files=expected_files,
            description=description,
            tags=tags,
        )

        # Whatever remains in d was not a declared field; keep it for
        # lossless round-tripping.
        upload_dataset_request.additional_properties = d
        return upload_dataset_request

    @property
    def additional_keys(self) -> list[str]:
        """Names of the undeclared extra properties carried by this model."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str): Name of the dataset
- process_id (str): ID of the ingest process Example: paired_dnaseq.
- expected_files (list[str]):
- description (str | Unset): Description of the dataset
- tags (list[Tag] | None | Unset): List of tags to apply to the dataset
28def __init__(self, name, process_id, expected_files, description=attr_dict['description'].default, tags=attr_dict['tags'].default): 29 self.name = name 30 self.process_id = process_id 31 self.expected_files = expected_files 32 self.description = description 33 self.tags = tags 34 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class UploadDatasetRequest.
37 def to_dict(self) -> dict[str, Any]: 38 name = self.name 39 40 process_id = self.process_id 41 42 expected_files = self.expected_files 43 44 description = self.description 45 46 tags: list[dict[str, Any]] | None | Unset 47 if isinstance(self.tags, Unset): 48 tags = UNSET 49 elif isinstance(self.tags, list): 50 tags = [] 51 for tags_type_0_item_data in self.tags: 52 tags_type_0_item = tags_type_0_item_data.to_dict() 53 tags.append(tags_type_0_item) 54 55 else: 56 tags = self.tags 57 58 field_dict: dict[str, Any] = {} 59 field_dict.update(self.additional_properties) 60 field_dict.update( 61 { 62 "name": name, 63 "processId": process_id, 64 "expectedFiles": expected_files, 65 } 66 ) 67 if description is not UNSET: 68 field_dict["description"] = description 69 if tags is not UNSET: 70 field_dict["tags"] = tags 71 72 return field_dict
74 @classmethod 75 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 76 from ..models.tag import Tag 77 78 d = dict(src_dict) 79 name = d.pop("name") 80 81 process_id = d.pop("processId") 82 83 expected_files = cast(list[str], d.pop("expectedFiles")) 84 85 description = d.pop("description", UNSET) 86 87 def _parse_tags(data: object) -> list[Tag] | None | Unset: 88 if data is None: 89 return data 90 if isinstance(data, Unset): 91 return data 92 try: 93 if not isinstance(data, list): 94 raise TypeError() 95 tags_type_0 = [] 96 _tags_type_0 = data 97 for tags_type_0_item_data in _tags_type_0: 98 tags_type_0_item = Tag.from_dict(tags_type_0_item_data) 99 100 tags_type_0.append(tags_type_0_item) 101 102 return tags_type_0 103 except (TypeError, ValueError, AttributeError, KeyError): 104 pass 105 return cast(list[Tag] | None | Unset, data) 106 107 tags = _parse_tags(d.pop("tags", UNSET)) 108 109 upload_dataset_request = cls( 110 name=name, 111 process_id=process_id, 112 expected_files=expected_files, 113 description=description, 114 tags=tags, 115 ) 116 117 upload_dataset_request.additional_properties = d 118 return upload_dataset_request
@_attrs_define
class User:
    """A summary view of a platform user.

    Attributes:
        name (str):
        username (str):
        organization (str):
        department (str):
        job_title (str):
    """

    name: str
    username: str
    organization: str
    department: str
    job_title: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a camelCase dict; extra properties are merged in first
        so the declared fields take precedence on key collisions."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "name": self.name,
                "username": self.username,
                "organization": self.organization,
                "department": self.department,
                "jobTitle": self.job_title,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a User from a wire-format mapping; unconsumed keys are
        retained in ``additional_properties``."""
        payload = dict(src_dict)
        user = cls(
            name=payload.pop("name"),
            username=payload.pop("username"),
            organization=payload.pop("organization"),
            department=payload.pop("department"),
            job_title=payload.pop("jobTitle"),
        )
        user.additional_properties = payload
        return user

    @property
    def additional_keys(self) -> list[str]:
        """Names of the extra (undeclared) properties."""
        return [key for key in self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str):
- username (str):
- organization (str):
- department (str):
- job_title (str):
28def __init__(self, name, username, organization, department, job_title): 29 self.name = name 30 self.username = username 31 self.organization = organization 32 self.department = department 33 self.job_title = job_title 34 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class User.
31 def to_dict(self) -> dict[str, Any]: 32 name = self.name 33 34 username = self.username 35 36 organization = self.organization 37 38 department = self.department 39 40 job_title = self.job_title 41 42 field_dict: dict[str, Any] = {} 43 field_dict.update(self.additional_properties) 44 field_dict.update( 45 { 46 "name": name, 47 "username": username, 48 "organization": organization, 49 "department": department, 50 "jobTitle": job_title, 51 } 52 ) 53 54 return field_dict
56 @classmethod 57 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 58 d = dict(src_dict) 59 name = d.pop("name") 60 61 username = d.pop("username") 62 63 organization = d.pop("organization") 64 65 department = d.pop("department") 66 67 job_title = d.pop("jobTitle") 68 69 user = cls( 70 name=name, 71 username=username, 72 organization=organization, 73 department=department, 74 job_title=job_title, 75 ) 76 77 user.additional_properties = d 78 return user
@_attrs_define
class UserDetail:
    """Full detail record for a user, including project assignments.

    Attributes:
        username (str):
        name (str):
        phone (str):
        email (str):
        organization (str):
        job_title (str):
        department (str):
        invited_by (str):
        project_assignments (list[UserProjectAssignment]):
        groups (list[str]):
        settings (UserSettings): Additional settings for the user
        sign_up_time (datetime.datetime | None | Unset):
        last_signed_in (datetime.datetime | None | Unset):
    """

    username: str
    name: str
    phone: str
    email: str
    organization: str
    job_title: str
    department: str
    invited_by: str
    project_assignments: list[UserProjectAssignment]
    groups: list[str]
    settings: UserSettings
    sign_up_time: datetime.datetime | None | Unset = UNSET
    last_signed_in: datetime.datetime | None | Unset = UNSET
    # Holds any wire-payload keys that are not declared fields above.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict.

        snake_case fields map to camelCase wire names; datetimes are emitted
        as ISO-8601 strings; UNSET optionals are omitted; extra properties
        are merged in first so declared fields win on collisions.
        """
        username = self.username

        name = self.name

        phone = self.phone

        email = self.email

        organization = self.organization

        job_title = self.job_title

        department = self.department

        invited_by = self.invited_by

        # Nested models serialize through their own to_dict().
        project_assignments = []
        for project_assignments_item_data in self.project_assignments:
            project_assignments_item = project_assignments_item_data.to_dict()
            project_assignments.append(project_assignments_item)

        groups = self.groups

        settings = self.settings.to_dict()

        # sign_up_time is a union: UNSET passes through, a datetime becomes
        # an ISO-8601 string, and None is emitted as-is.
        sign_up_time: None | str | Unset
        if isinstance(self.sign_up_time, Unset):
            sign_up_time = UNSET
        elif isinstance(self.sign_up_time, datetime.datetime):
            sign_up_time = self.sign_up_time.isoformat()
        else:
            sign_up_time = self.sign_up_time

        # Same union handling as sign_up_time.
        last_signed_in: None | str | Unset
        if isinstance(self.last_signed_in, Unset):
            last_signed_in = UNSET
        elif isinstance(self.last_signed_in, datetime.datetime):
            last_signed_in = self.last_signed_in.isoformat()
        else:
            last_signed_in = self.last_signed_in

        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "username": username,
                "name": name,
                "phone": phone,
                "email": email,
                "organization": organization,
                "jobTitle": job_title,
                "department": department,
                "invitedBy": invited_by,
                "projectAssignments": project_assignments,
                "groups": groups,
                "settings": settings,
            }
        )
        # Optional fields are only written when explicitly set (not UNSET).
        if sign_up_time is not UNSET:
            field_dict["signUpTime"] = sign_up_time
        if last_signed_in is not UNSET:
            field_dict["lastSignedIn"] = last_signed_in

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a wire-format mapping.

        Raises KeyError if any required key is missing. Unconsumed keys are
        kept in ``additional_properties``.
        """
        # Local imports: keep module import time free of circular dependencies.
        from ..models.user_project_assignment import UserProjectAssignment
        from ..models.user_settings import UserSettings

        d = dict(src_dict)
        username = d.pop("username")

        name = d.pop("name")

        phone = d.pop("phone")

        email = d.pop("email")

        organization = d.pop("organization")

        job_title = d.pop("jobTitle")

        department = d.pop("department")

        invited_by = d.pop("invitedBy")

        project_assignments = []
        _project_assignments = d.pop("projectAssignments")
        for project_assignments_item_data in _project_assignments:
            project_assignments_item = UserProjectAssignment.from_dict(project_assignments_item_data)

            project_assignments.append(project_assignments_item)

        groups = cast(list[str], d.pop("groups"))

        settings = UserSettings.from_dict(d.pop("settings"))

        def _parse_sign_up_time(data: object) -> datetime.datetime | None | Unset:
            # None and UNSET pass straight through; a string is attempted as
            # an ISO-8601 timestamp; anything unparseable is returned
            # unchanged (the broad except deliberately falls through).
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                sign_up_time_type_0 = isoparse(data)

                return sign_up_time_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(datetime.datetime | None | Unset, data)

        sign_up_time = _parse_sign_up_time(d.pop("signUpTime", UNSET))

        def _parse_last_signed_in(data: object) -> datetime.datetime | None | Unset:
            # Same union handling as _parse_sign_up_time.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                last_signed_in_type_0 = isoparse(data)

                return last_signed_in_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(datetime.datetime | None | Unset, data)

        last_signed_in = _parse_last_signed_in(d.pop("lastSignedIn", UNSET))

        user_detail = cls(
            username=username,
            name=name,
            phone=phone,
            email=email,
            organization=organization,
            job_title=job_title,
            department=department,
            invited_by=invited_by,
            project_assignments=project_assignments,
            groups=groups,
            settings=settings,
            sign_up_time=sign_up_time,
            last_signed_in=last_signed_in,
        )

        # Whatever remains in d was not a declared field; keep it for
        # lossless round-tripping.
        user_detail.additional_properties = d
        return user_detail

    @property
    def additional_keys(self) -> list[str]:
        """Names of the undeclared extra properties carried by this model."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- username (str):
- name (str):
- phone (str):
- email (str):
- organization (str):
- job_title (str):
- department (str):
- invited_by (str):
- project_assignments (list[UserProjectAssignment]):
- groups (list[str]):
- settings (UserSettings): Additional settings for the user
- sign_up_time (datetime.datetime | None | Unset):
- last_signed_in (datetime.datetime | None | Unset):
36def __init__(self, username, name, phone, email, organization, job_title, department, invited_by, project_assignments, groups, settings, sign_up_time=attr_dict['sign_up_time'].default, last_signed_in=attr_dict['last_signed_in'].default): 37 self.username = username 38 self.name = name 39 self.phone = phone 40 self.email = email 41 self.organization = organization 42 self.job_title = job_title 43 self.department = department 44 self.invited_by = invited_by 45 self.project_assignments = project_assignments 46 self.groups = groups 47 self.settings = settings 48 self.sign_up_time = sign_up_time 49 self.last_signed_in = last_signed_in 50 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class UserDetail.
56 def to_dict(self) -> dict[str, Any]: 57 username = self.username 58 59 name = self.name 60 61 phone = self.phone 62 63 email = self.email 64 65 organization = self.organization 66 67 job_title = self.job_title 68 69 department = self.department 70 71 invited_by = self.invited_by 72 73 project_assignments = [] 74 for project_assignments_item_data in self.project_assignments: 75 project_assignments_item = project_assignments_item_data.to_dict() 76 project_assignments.append(project_assignments_item) 77 78 groups = self.groups 79 80 settings = self.settings.to_dict() 81 82 sign_up_time: None | str | Unset 83 if isinstance(self.sign_up_time, Unset): 84 sign_up_time = UNSET 85 elif isinstance(self.sign_up_time, datetime.datetime): 86 sign_up_time = self.sign_up_time.isoformat() 87 else: 88 sign_up_time = self.sign_up_time 89 90 last_signed_in: None | str | Unset 91 if isinstance(self.last_signed_in, Unset): 92 last_signed_in = UNSET 93 elif isinstance(self.last_signed_in, datetime.datetime): 94 last_signed_in = self.last_signed_in.isoformat() 95 else: 96 last_signed_in = self.last_signed_in 97 98 field_dict: dict[str, Any] = {} 99 field_dict.update(self.additional_properties) 100 field_dict.update( 101 { 102 "username": username, 103 "name": name, 104 "phone": phone, 105 "email": email, 106 "organization": organization, 107 "jobTitle": job_title, 108 "department": department, 109 "invitedBy": invited_by, 110 "projectAssignments": project_assignments, 111 "groups": groups, 112 "settings": settings, 113 } 114 ) 115 if sign_up_time is not UNSET: 116 field_dict["signUpTime"] = sign_up_time 117 if last_signed_in is not UNSET: 118 field_dict["lastSignedIn"] = last_signed_in 119 120 return field_dict
122 @classmethod 123 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 124 from ..models.user_project_assignment import UserProjectAssignment 125 from ..models.user_settings import UserSettings 126 127 d = dict(src_dict) 128 username = d.pop("username") 129 130 name = d.pop("name") 131 132 phone = d.pop("phone") 133 134 email = d.pop("email") 135 136 organization = d.pop("organization") 137 138 job_title = d.pop("jobTitle") 139 140 department = d.pop("department") 141 142 invited_by = d.pop("invitedBy") 143 144 project_assignments = [] 145 _project_assignments = d.pop("projectAssignments") 146 for project_assignments_item_data in _project_assignments: 147 project_assignments_item = UserProjectAssignment.from_dict(project_assignments_item_data) 148 149 project_assignments.append(project_assignments_item) 150 151 groups = cast(list[str], d.pop("groups")) 152 153 settings = UserSettings.from_dict(d.pop("settings")) 154 155 def _parse_sign_up_time(data: object) -> datetime.datetime | None | Unset: 156 if data is None: 157 return data 158 if isinstance(data, Unset): 159 return data 160 try: 161 if not isinstance(data, str): 162 raise TypeError() 163 sign_up_time_type_0 = isoparse(data) 164 165 return sign_up_time_type_0 166 except (TypeError, ValueError, AttributeError, KeyError): 167 pass 168 return cast(datetime.datetime | None | Unset, data) 169 170 sign_up_time = _parse_sign_up_time(d.pop("signUpTime", UNSET)) 171 172 def _parse_last_signed_in(data: object) -> datetime.datetime | None | Unset: 173 if data is None: 174 return data 175 if isinstance(data, Unset): 176 return data 177 try: 178 if not isinstance(data, str): 179 raise TypeError() 180 last_signed_in_type_0 = isoparse(data) 181 182 return last_signed_in_type_0 183 except (TypeError, ValueError, AttributeError, KeyError): 184 pass 185 return cast(datetime.datetime | None | Unset, data) 186 187 last_signed_in = _parse_last_signed_in(d.pop("lastSignedIn", UNSET)) 188 189 user_detail = cls( 190 
username=username, 191 name=name, 192 phone=phone, 193 email=email, 194 organization=organization, 195 job_title=job_title, 196 department=department, 197 invited_by=invited_by, 198 project_assignments=project_assignments, 199 groups=groups, 200 settings=settings, 201 sign_up_time=sign_up_time, 202 last_signed_in=last_signed_in, 203 ) 204 205 user_detail.additional_properties = d 206 return user_detail
@_attrs_define
class UserProjectAssignment:
    """Associates a user with a project and the role they hold in it.

    Attributes:
        project_id (str):
        role (ProjectRole):
        created_by (str):
        created_at (datetime.datetime | None | Unset):
    """

    project_id: str
    role: ProjectRole
    created_by: str
    created_at: datetime.datetime | None | Unset = UNSET
    # Holds any wire-payload keys that are not declared fields above.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict.

        snake_case fields map to camelCase wire names; the role enum is
        emitted as its value; an UNSET ``created_at`` is omitted; extra
        properties are merged in first so declared fields win on collisions.
        """
        project_id = self.project_id

        role = self.role.value

        created_by = self.created_by

        # created_at is a union: UNSET passes through, a datetime becomes an
        # ISO-8601 string, and None is emitted as-is.
        created_at: None | str | Unset
        if isinstance(self.created_at, Unset):
            created_at = UNSET
        elif isinstance(self.created_at, datetime.datetime):
            created_at = self.created_at.isoformat()
        else:
            created_at = self.created_at

        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "projectId": project_id,
                "role": role,
                "createdBy": created_by,
            }
        )
        if created_at is not UNSET:
            field_dict["createdAt"] = created_at

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a wire-format mapping.

        Raises KeyError if ``projectId``, ``role`` or ``createdBy`` is
        missing, and ValueError if ``role`` is not a valid ProjectRole value.
        Unconsumed keys are kept in ``additional_properties``.
        """
        d = dict(src_dict)
        project_id = d.pop("projectId")

        role = ProjectRole(d.pop("role"))

        created_by = d.pop("createdBy")

        def _parse_created_at(data: object) -> datetime.datetime | None | Unset:
            # None and UNSET pass straight through; a string is attempted as
            # an ISO-8601 timestamp; anything unparseable is returned
            # unchanged (the broad except deliberately falls through).
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                created_at_type_0 = isoparse(data)

                return created_at_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(datetime.datetime | None | Unset, data)

        created_at = _parse_created_at(d.pop("createdAt", UNSET))

        user_project_assignment = cls(
            project_id=project_id,
            role=role,
            created_by=created_by,
            created_at=created_at,
        )

        # Whatever remains in d was not a declared field; keep it for
        # lossless round-tripping.
        user_project_assignment.additional_properties = d
        return user_project_assignment

    @property
    def additional_keys(self) -> list[str]:
        """Names of the undeclared extra properties carried by this model."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- project_id (str):
- role (ProjectRole):
- created_by (str):
- created_at (datetime.datetime | None | Unset):
27def __init__(self, project_id, role, created_by, created_at=attr_dict['created_at'].default): 28 self.project_id = project_id 29 self.role = role 30 self.created_by = created_by 31 self.created_at = created_at 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class UserProjectAssignment.
34 def to_dict(self) -> dict[str, Any]: 35 project_id = self.project_id 36 37 role = self.role.value 38 39 created_by = self.created_by 40 41 created_at: None | str | Unset 42 if isinstance(self.created_at, Unset): 43 created_at = UNSET 44 elif isinstance(self.created_at, datetime.datetime): 45 created_at = self.created_at.isoformat() 46 else: 47 created_at = self.created_at 48 49 field_dict: dict[str, Any] = {} 50 field_dict.update(self.additional_properties) 51 field_dict.update( 52 { 53 "projectId": project_id, 54 "role": role, 55 "createdBy": created_by, 56 } 57 ) 58 if created_at is not UNSET: 59 field_dict["createdAt"] = created_at 60 61 return field_dict
63 @classmethod 64 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 65 d = dict(src_dict) 66 project_id = d.pop("projectId") 67 68 role = ProjectRole(d.pop("role")) 69 70 created_by = d.pop("createdBy") 71 72 def _parse_created_at(data: object) -> datetime.datetime | None | Unset: 73 if data is None: 74 return data 75 if isinstance(data, Unset): 76 return data 77 try: 78 if not isinstance(data, str): 79 raise TypeError() 80 created_at_type_0 = isoparse(data) 81 82 return created_at_type_0 83 except (TypeError, ValueError, AttributeError, KeyError): 84 pass 85 return cast(datetime.datetime | None | Unset, data) 86 87 created_at = _parse_created_at(d.pop("createdAt", UNSET)) 88 89 user_project_assignment = cls( 90 project_id=project_id, 91 role=role, 92 created_by=created_by, 93 created_at=created_at, 94 ) 95 96 user_project_assignment.additional_properties = d 97 return user_project_assignment
13@_attrs_define 14class UserSettings: 15 """Additional settings for the user 16 17 Attributes: 18 analysis_update_notifications_enabled (bool): 19 """ 20 21 analysis_update_notifications_enabled: bool 22 additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) 23 24 def to_dict(self) -> dict[str, Any]: 25 analysis_update_notifications_enabled = self.analysis_update_notifications_enabled 26 27 field_dict: dict[str, Any] = {} 28 field_dict.update(self.additional_properties) 29 field_dict.update( 30 { 31 "analysisUpdateNotificationsEnabled": analysis_update_notifications_enabled, 32 } 33 ) 34 35 return field_dict 36 37 @classmethod 38 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 39 d = dict(src_dict) 40 analysis_update_notifications_enabled = d.pop("analysisUpdateNotificationsEnabled") 41 42 user_settings = cls( 43 analysis_update_notifications_enabled=analysis_update_notifications_enabled, 44 ) 45 46 user_settings.additional_properties = d 47 return user_settings 48 49 @property 50 def additional_keys(self) -> list[str]: 51 return list(self.additional_properties.keys()) 52 53 def __getitem__(self, key: str) -> Any: 54 return self.additional_properties[key] 55 56 def __setitem__(self, key: str, value: Any) -> None: 57 self.additional_properties[key] = value 58 59 def __delitem__(self, key: str) -> None: 60 del self.additional_properties[key] 61 62 def __contains__(self, key: str) -> bool: 63 return key in self.additional_properties
Additional settings for the user
Attributes:
- analysis_update_notifications_enabled (bool):
24def __init__(self, analysis_update_notifications_enabled): 25 self.analysis_update_notifications_enabled = analysis_update_notifications_enabled 26 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class UserSettings.
24 def to_dict(self) -> dict[str, Any]: 25 analysis_update_notifications_enabled = self.analysis_update_notifications_enabled 26 27 field_dict: dict[str, Any] = {} 28 field_dict.update(self.additional_properties) 29 field_dict.update( 30 { 31 "analysisUpdateNotificationsEnabled": analysis_update_notifications_enabled, 32 } 33 ) 34 35 return field_dict
37 @classmethod 38 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 39 d = dict(src_dict) 40 analysis_update_notifications_enabled = d.pop("analysisUpdateNotificationsEnabled") 41 42 user_settings = cls( 43 analysis_update_notifications_enabled=analysis_update_notifications_enabled, 44 ) 45 46 user_settings.additional_properties = d 47 return user_settings
13@_attrs_define 14class ValidateFileNamePatternsRequest: 15 """ 16 Attributes: 17 file_names (list[str]): 18 file_name_patterns (list[str]): 19 """ 20 21 file_names: list[str] 22 file_name_patterns: list[str] 23 additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) 24 25 def to_dict(self) -> dict[str, Any]: 26 file_names = self.file_names 27 28 file_name_patterns = self.file_name_patterns 29 30 field_dict: dict[str, Any] = {} 31 field_dict.update(self.additional_properties) 32 field_dict.update( 33 { 34 "fileNames": file_names, 35 "fileNamePatterns": file_name_patterns, 36 } 37 ) 38 39 return field_dict 40 41 @classmethod 42 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 43 d = dict(src_dict) 44 file_names = cast(list[str], d.pop("fileNames")) 45 46 file_name_patterns = cast(list[str], d.pop("fileNamePatterns")) 47 48 validate_file_name_patterns_request = cls( 49 file_names=file_names, 50 file_name_patterns=file_name_patterns, 51 ) 52 53 validate_file_name_patterns_request.additional_properties = d 54 return validate_file_name_patterns_request 55 56 @property 57 def additional_keys(self) -> list[str]: 58 return list(self.additional_properties.keys()) 59 60 def __getitem__(self, key: str) -> Any: 61 return self.additional_properties[key] 62 63 def __setitem__(self, key: str, value: Any) -> None: 64 self.additional_properties[key] = value 65 66 def __delitem__(self, key: str) -> None: 67 del self.additional_properties[key] 68 69 def __contains__(self, key: str) -> bool: 70 return key in self.additional_properties
Attributes:
- file_names (list[str]):
- file_name_patterns (list[str]):
25def __init__(self, file_names, file_name_patterns): 26 self.file_names = file_names 27 self.file_name_patterns = file_name_patterns 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ValidateFileNamePatternsRequest.
25 def to_dict(self) -> dict[str, Any]: 26 file_names = self.file_names 27 28 file_name_patterns = self.file_name_patterns 29 30 field_dict: dict[str, Any] = {} 31 field_dict.update(self.additional_properties) 32 field_dict.update( 33 { 34 "fileNames": file_names, 35 "fileNamePatterns": file_name_patterns, 36 } 37 ) 38 39 return field_dict
41 @classmethod 42 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 43 d = dict(src_dict) 44 file_names = cast(list[str], d.pop("fileNames")) 45 46 file_name_patterns = cast(list[str], d.pop("fileNamePatterns")) 47 48 validate_file_name_patterns_request = cls( 49 file_names=file_names, 50 file_name_patterns=file_name_patterns, 51 ) 52 53 validate_file_name_patterns_request.additional_properties = d 54 return validate_file_name_patterns_request
13@_attrs_define 14class ValidateFileRequirementsRequest: 15 """ 16 Attributes: 17 file_names (list[str]): 18 sample_sheet (str): 19 """ 20 21 file_names: list[str] 22 sample_sheet: str 23 additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) 24 25 def to_dict(self) -> dict[str, Any]: 26 file_names = self.file_names 27 28 sample_sheet = self.sample_sheet 29 30 field_dict: dict[str, Any] = {} 31 field_dict.update(self.additional_properties) 32 field_dict.update( 33 { 34 "fileNames": file_names, 35 "sampleSheet": sample_sheet, 36 } 37 ) 38 39 return field_dict 40 41 @classmethod 42 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 43 d = dict(src_dict) 44 file_names = cast(list[str], d.pop("fileNames")) 45 46 sample_sheet = d.pop("sampleSheet") 47 48 validate_file_requirements_request = cls( 49 file_names=file_names, 50 sample_sheet=sample_sheet, 51 ) 52 53 validate_file_requirements_request.additional_properties = d 54 return validate_file_requirements_request 55 56 @property 57 def additional_keys(self) -> list[str]: 58 return list(self.additional_properties.keys()) 59 60 def __getitem__(self, key: str) -> Any: 61 return self.additional_properties[key] 62 63 def __setitem__(self, key: str, value: Any) -> None: 64 self.additional_properties[key] = value 65 66 def __delitem__(self, key: str) -> None: 67 del self.additional_properties[key] 68 69 def __contains__(self, key: str) -> bool: 70 return key in self.additional_properties
Attributes:
- file_names (list[str]):
- sample_sheet (str):
25def __init__(self, file_names, sample_sheet): 26 self.file_names = file_names 27 self.sample_sheet = sample_sheet 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ValidateFileRequirementsRequest.
25 def to_dict(self) -> dict[str, Any]: 26 file_names = self.file_names 27 28 sample_sheet = self.sample_sheet 29 30 field_dict: dict[str, Any] = {} 31 field_dict.update(self.additional_properties) 32 field_dict.update( 33 { 34 "fileNames": file_names, 35 "sampleSheet": sample_sheet, 36 } 37 ) 38 39 return field_dict
41 @classmethod 42 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 43 d = dict(src_dict) 44 file_names = cast(list[str], d.pop("fileNames")) 45 46 sample_sheet = d.pop("sampleSheet") 47 48 validate_file_requirements_request = cls( 49 file_names=file_names, 50 sample_sheet=sample_sheet, 51 ) 52 53 validate_file_requirements_request.additional_properties = d 54 return validate_file_requirements_request
13@_attrs_define 14class VersionSpecification: 15 """ 16 Attributes: 17 version (str): 18 is_default (bool): 19 is_latest (bool): 20 """ 21 22 version: str 23 is_default: bool 24 is_latest: bool 25 additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) 26 27 def to_dict(self) -> dict[str, Any]: 28 version = self.version 29 30 is_default = self.is_default 31 32 is_latest = self.is_latest 33 34 field_dict: dict[str, Any] = {} 35 field_dict.update(self.additional_properties) 36 field_dict.update( 37 { 38 "version": version, 39 "isDefault": is_default, 40 "isLatest": is_latest, 41 } 42 ) 43 44 return field_dict 45 46 @classmethod 47 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 48 d = dict(src_dict) 49 version = d.pop("version") 50 51 is_default = d.pop("isDefault") 52 53 is_latest = d.pop("isLatest") 54 55 version_specification = cls( 56 version=version, 57 is_default=is_default, 58 is_latest=is_latest, 59 ) 60 61 version_specification.additional_properties = d 62 return version_specification 63 64 @property 65 def additional_keys(self) -> list[str]: 66 return list(self.additional_properties.keys()) 67 68 def __getitem__(self, key: str) -> Any: 69 return self.additional_properties[key] 70 71 def __setitem__(self, key: str, value: Any) -> None: 72 self.additional_properties[key] = value 73 74 def __delitem__(self, key: str) -> None: 75 del self.additional_properties[key] 76 77 def __contains__(self, key: str) -> bool: 78 return key in self.additional_properties
Attributes:
- version (str):
- is_default (bool):
- is_latest (bool):
26def __init__(self, version, is_default, is_latest): 27 self.version = version 28 self.is_default = is_default 29 self.is_latest = is_latest 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class VersionSpecification.
27 def to_dict(self) -> dict[str, Any]: 28 version = self.version 29 30 is_default = self.is_default 31 32 is_latest = self.is_latest 33 34 field_dict: dict[str, Any] = {} 35 field_dict.update(self.additional_properties) 36 field_dict.update( 37 { 38 "version": version, 39 "isDefault": is_default, 40 "isLatest": is_latest, 41 } 42 ) 43 44 return field_dict
46 @classmethod 47 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 48 d = dict(src_dict) 49 version = d.pop("version") 50 51 is_default = d.pop("isDefault") 52 53 is_latest = d.pop("isLatest") 54 55 version_specification = cls( 56 version=version, 57 is_default=is_default, 58 is_latest=is_latest, 59 ) 60 61 version_specification.additional_properties = d 62 return version_specification
@_attrs_define
class Workspace:
    """API model for a workspace: identity, status, compute configuration, and lifecycle timestamps.

    Attributes:
        id (str):
        name (str):
        description (str):
        project_id (str):
        status (Status):
        status_message (str):
        environment_id (str):
        mounted_datasets (list[MountedDataset]):
        compute_config (WorkspaceComputeConfig): Configuration parameters for a containerized workspace compute
            environment.
        sharing_type (SharingType):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
        auto_stop_timeout (int | None | Unset):
        sessions (list[WorkspaceSession] | None | Unset):
        started_at (datetime.datetime | None | Unset):
        auto_stop_time (datetime.datetime | None | Unset):
    """

    id: str
    name: str
    description: str
    project_id: str
    status: Status
    status_message: str
    environment_id: str
    mounted_datasets: list[MountedDataset]
    compute_config: WorkspaceComputeConfig
    sharing_type: SharingType
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    # Optional fields default to the UNSET sentinel, which means "absent from the
    # payload" (distinct from an explicit JSON null / Python None).
    auto_stop_timeout: int | None | Unset = UNSET
    sessions: list[WorkspaceSession] | None | Unset = UNSET
    started_at: datetime.datetime | None | Unset = UNSET
    auto_stop_time: datetime.datetime | None | Unset = UNSET
    # Catch-all for payload keys not declared above; excluded from __init__.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict using the API's camelCase keys.

        Required fields are always emitted; optional fields are emitted only when
        they are not the UNSET sentinel. Nested models are serialized recursively,
        enums via .value, and datetimes as ISO-8601 strings.
        """
        id = self.id

        name = self.name

        description = self.description

        project_id = self.project_id

        status = self.status.value

        status_message = self.status_message

        environment_id = self.environment_id

        mounted_datasets = []
        for mounted_datasets_item_data in self.mounted_datasets:
            mounted_datasets_item = mounted_datasets_item_data.to_dict()
            mounted_datasets.append(mounted_datasets_item)

        compute_config = self.compute_config.to_dict()

        sharing_type = self.sharing_type.value

        created_by = self.created_by

        created_at = self.created_at.isoformat()

        updated_at = self.updated_at.isoformat()

        # Pass UNSET/None through unchanged; only a real int is a real value.
        auto_stop_timeout: int | None | Unset
        if isinstance(self.auto_stop_timeout, Unset):
            auto_stop_timeout = UNSET
        else:
            auto_stop_timeout = self.auto_stop_timeout

        # A list is serialized item-by-item; None/UNSET pass through unchanged.
        sessions: list[dict[str, Any]] | None | Unset
        if isinstance(self.sessions, Unset):
            sessions = UNSET
        elif isinstance(self.sessions, list):
            sessions = []
            for sessions_type_0_item_data in self.sessions:
                sessions_type_0_item = sessions_type_0_item_data.to_dict()
                sessions.append(sessions_type_0_item)

        else:
            sessions = self.sessions

        started_at: None | str | Unset
        if isinstance(self.started_at, Unset):
            started_at = UNSET
        elif isinstance(self.started_at, datetime.datetime):
            started_at = self.started_at.isoformat()
        else:
            started_at = self.started_at

        auto_stop_time: None | str | Unset
        if isinstance(self.auto_stop_time, Unset):
            auto_stop_time = UNSET
        elif isinstance(self.auto_stop_time, datetime.datetime):
            auto_stop_time = self.auto_stop_time.isoformat()
        else:
            auto_stop_time = self.auto_stop_time

        # Extras go in first so the declared fields below always win on key clash.
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
                "description": description,
                "projectId": project_id,
                "status": status,
                "statusMessage": status_message,
                "environmentId": environment_id,
                "mountedDatasets": mounted_datasets,
                "computeConfig": compute_config,
                "sharingType": sharing_type,
                "createdBy": created_by,
                "createdAt": created_at,
                "updatedAt": updated_at,
            }
        )
        # Optional keys are omitted entirely when UNSET (but kept when None).
        if auto_stop_timeout is not UNSET:
            field_dict["autoStopTimeout"] = auto_stop_timeout
        if sessions is not UNSET:
            field_dict["sessions"] = sessions
        if started_at is not UNSET:
            field_dict["startedAt"] = started_at
        if auto_stop_time is not UNSET:
            field_dict["autoStopTime"] = auto_stop_time

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build a Workspace from a decoded JSON mapping.

        Required keys raise KeyError if missing; optional keys default to UNSET.
        Keys left over after parsing are preserved in additional_properties.
        """
        # Local imports — presumably to avoid circular imports at module load
        # time (standard pattern in this generated client); TODO confirm.
        from ..models.mounted_dataset import MountedDataset
        from ..models.workspace_compute_config import WorkspaceComputeConfig
        from ..models.workspace_session import WorkspaceSession

        d = dict(src_dict)
        id = d.pop("id")

        name = d.pop("name")

        description = d.pop("description")

        project_id = d.pop("projectId")

        status = Status(d.pop("status"))

        status_message = d.pop("statusMessage")

        environment_id = d.pop("environmentId")

        mounted_datasets = []
        _mounted_datasets = d.pop("mountedDatasets")
        for mounted_datasets_item_data in _mounted_datasets:
            mounted_datasets_item = MountedDataset.from_dict(mounted_datasets_item_data)

            mounted_datasets.append(mounted_datasets_item)

        compute_config = WorkspaceComputeConfig.from_dict(d.pop("computeConfig"))

        sharing_type = SharingType(d.pop("sharingType"))

        created_by = d.pop("createdBy")

        created_at = isoparse(d.pop("createdAt"))

        updated_at = isoparse(d.pop("updatedAt"))

        # None/UNSET pass through; anything else is passed along as-is.
        def _parse_auto_stop_timeout(data: object) -> int | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(int | None | Unset, data)

        auto_stop_timeout = _parse_auto_stop_timeout(d.pop("autoStopTimeout", UNSET))

        # A list is parsed item-by-item; on any parse failure the raw value is
        # returned unchanged (best-effort deserialization).
        def _parse_sessions(data: object) -> list[WorkspaceSession] | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                sessions_type_0 = []
                _sessions_type_0 = data
                for sessions_type_0_item_data in _sessions_type_0:
                    sessions_type_0_item = WorkspaceSession.from_dict(sessions_type_0_item_data)

                    sessions_type_0.append(sessions_type_0_item)

                return sessions_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(list[WorkspaceSession] | None | Unset, data)

        sessions = _parse_sessions(d.pop("sessions", UNSET))

        # ISO-8601 strings are parsed to datetimes; anything unparseable is
        # returned unchanged (best-effort deserialization).
        def _parse_started_at(data: object) -> datetime.datetime | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                started_at_type_0 = isoparse(data)

                return started_at_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(datetime.datetime | None | Unset, data)

        started_at = _parse_started_at(d.pop("startedAt", UNSET))

        def _parse_auto_stop_time(data: object) -> datetime.datetime | None | Unset:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                auto_stop_time_type_0 = isoparse(data)

                return auto_stop_time_type_0
            except (TypeError, ValueError, AttributeError, KeyError):
                pass
            return cast(datetime.datetime | None | Unset, data)

        auto_stop_time = _parse_auto_stop_time(d.pop("autoStopTime", UNSET))

        workspace = cls(
            id=id,
            name=name,
            description=description,
            project_id=project_id,
            status=status,
            status_message=status_message,
            environment_id=environment_id,
            mounted_datasets=mounted_datasets,
            compute_config=compute_config,
            sharing_type=sharing_type,
            created_by=created_by,
            created_at=created_at,
            updated_at=updated_at,
            auto_stop_timeout=auto_stop_timeout,
            sessions=sessions,
            started_at=started_at,
            auto_stop_time=auto_stop_time,
        )

        # Whatever keys remain were not declared on the model.
        workspace.additional_properties = d
        return workspace

    @property
    def additional_keys(self) -> list[str]:
        """Names of all undeclared properties carried on this model."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- description (str):
- project_id (str):
- status (Status):
- status_message (str):
- environment_id (str):
- mounted_datasets (list[MountedDataset]):
- compute_config (WorkspaceComputeConfig): Configuration parameters for a containerized workspace compute environment.
- sharing_type (SharingType):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
- auto_stop_timeout (int | None | Unset):
- sessions (list[WorkspaceSession] | None | Unset):
- started_at (datetime.datetime | None | Unset):
- auto_stop_time (datetime.datetime | None | Unset):
40def __init__(self, id, name, description, project_id, status, status_message, environment_id, mounted_datasets, compute_config, sharing_type, created_by, created_at, updated_at, auto_stop_timeout=attr_dict['auto_stop_timeout'].default, sessions=attr_dict['sessions'].default, started_at=attr_dict['started_at'].default, auto_stop_time=attr_dict['auto_stop_time'].default): 41 self.id = id 42 self.name = name 43 self.description = description 44 self.project_id = project_id 45 self.status = status 46 self.status_message = status_message 47 self.environment_id = environment_id 48 self.mounted_datasets = mounted_datasets 49 self.compute_config = compute_config 50 self.sharing_type = sharing_type 51 self.created_by = created_by 52 self.created_at = created_at 53 self.updated_at = updated_at 54 self.auto_stop_timeout = auto_stop_timeout 55 self.sessions = sessions 56 self.started_at = started_at 57 self.auto_stop_time = auto_stop_time 58 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Workspace.
68 def to_dict(self) -> dict[str, Any]: 69 id = self.id 70 71 name = self.name 72 73 description = self.description 74 75 project_id = self.project_id 76 77 status = self.status.value 78 79 status_message = self.status_message 80 81 environment_id = self.environment_id 82 83 mounted_datasets = [] 84 for mounted_datasets_item_data in self.mounted_datasets: 85 mounted_datasets_item = mounted_datasets_item_data.to_dict() 86 mounted_datasets.append(mounted_datasets_item) 87 88 compute_config = self.compute_config.to_dict() 89 90 sharing_type = self.sharing_type.value 91 92 created_by = self.created_by 93 94 created_at = self.created_at.isoformat() 95 96 updated_at = self.updated_at.isoformat() 97 98 auto_stop_timeout: int | None | Unset 99 if isinstance(self.auto_stop_timeout, Unset): 100 auto_stop_timeout = UNSET 101 else: 102 auto_stop_timeout = self.auto_stop_timeout 103 104 sessions: list[dict[str, Any]] | None | Unset 105 if isinstance(self.sessions, Unset): 106 sessions = UNSET 107 elif isinstance(self.sessions, list): 108 sessions = [] 109 for sessions_type_0_item_data in self.sessions: 110 sessions_type_0_item = sessions_type_0_item_data.to_dict() 111 sessions.append(sessions_type_0_item) 112 113 else: 114 sessions = self.sessions 115 116 started_at: None | str | Unset 117 if isinstance(self.started_at, Unset): 118 started_at = UNSET 119 elif isinstance(self.started_at, datetime.datetime): 120 started_at = self.started_at.isoformat() 121 else: 122 started_at = self.started_at 123 124 auto_stop_time: None | str | Unset 125 if isinstance(self.auto_stop_time, Unset): 126 auto_stop_time = UNSET 127 elif isinstance(self.auto_stop_time, datetime.datetime): 128 auto_stop_time = self.auto_stop_time.isoformat() 129 else: 130 auto_stop_time = self.auto_stop_time 131 132 field_dict: dict[str, Any] = {} 133 field_dict.update(self.additional_properties) 134 field_dict.update( 135 { 136 "id": id, 137 "name": name, 138 "description": description, 139 "projectId": project_id, 
140 "status": status, 141 "statusMessage": status_message, 142 "environmentId": environment_id, 143 "mountedDatasets": mounted_datasets, 144 "computeConfig": compute_config, 145 "sharingType": sharing_type, 146 "createdBy": created_by, 147 "createdAt": created_at, 148 "updatedAt": updated_at, 149 } 150 ) 151 if auto_stop_timeout is not UNSET: 152 field_dict["autoStopTimeout"] = auto_stop_timeout 153 if sessions is not UNSET: 154 field_dict["sessions"] = sessions 155 if started_at is not UNSET: 156 field_dict["startedAt"] = started_at 157 if auto_stop_time is not UNSET: 158 field_dict["autoStopTime"] = auto_stop_time 159 160 return field_dict
162 @classmethod 163 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 164 from ..models.mounted_dataset import MountedDataset 165 from ..models.workspace_compute_config import WorkspaceComputeConfig 166 from ..models.workspace_session import WorkspaceSession 167 168 d = dict(src_dict) 169 id = d.pop("id") 170 171 name = d.pop("name") 172 173 description = d.pop("description") 174 175 project_id = d.pop("projectId") 176 177 status = Status(d.pop("status")) 178 179 status_message = d.pop("statusMessage") 180 181 environment_id = d.pop("environmentId") 182 183 mounted_datasets = [] 184 _mounted_datasets = d.pop("mountedDatasets") 185 for mounted_datasets_item_data in _mounted_datasets: 186 mounted_datasets_item = MountedDataset.from_dict(mounted_datasets_item_data) 187 188 mounted_datasets.append(mounted_datasets_item) 189 190 compute_config = WorkspaceComputeConfig.from_dict(d.pop("computeConfig")) 191 192 sharing_type = SharingType(d.pop("sharingType")) 193 194 created_by = d.pop("createdBy") 195 196 created_at = isoparse(d.pop("createdAt")) 197 198 updated_at = isoparse(d.pop("updatedAt")) 199 200 def _parse_auto_stop_timeout(data: object) -> int | None | Unset: 201 if data is None: 202 return data 203 if isinstance(data, Unset): 204 return data 205 return cast(int | None | Unset, data) 206 207 auto_stop_timeout = _parse_auto_stop_timeout(d.pop("autoStopTimeout", UNSET)) 208 209 def _parse_sessions(data: object) -> list[WorkspaceSession] | None | Unset: 210 if data is None: 211 return data 212 if isinstance(data, Unset): 213 return data 214 try: 215 if not isinstance(data, list): 216 raise TypeError() 217 sessions_type_0 = [] 218 _sessions_type_0 = data 219 for sessions_type_0_item_data in _sessions_type_0: 220 sessions_type_0_item = WorkspaceSession.from_dict(sessions_type_0_item_data) 221 222 sessions_type_0.append(sessions_type_0_item) 223 224 return sessions_type_0 225 except (TypeError, ValueError, AttributeError, KeyError): 226 pass 227 return 
cast(list[WorkspaceSession] | None | Unset, data) 228 229 sessions = _parse_sessions(d.pop("sessions", UNSET)) 230 231 def _parse_started_at(data: object) -> datetime.datetime | None | Unset: 232 if data is None: 233 return data 234 if isinstance(data, Unset): 235 return data 236 try: 237 if not isinstance(data, str): 238 raise TypeError() 239 started_at_type_0 = isoparse(data) 240 241 return started_at_type_0 242 except (TypeError, ValueError, AttributeError, KeyError): 243 pass 244 return cast(datetime.datetime | None | Unset, data) 245 246 started_at = _parse_started_at(d.pop("startedAt", UNSET)) 247 248 def _parse_auto_stop_time(data: object) -> datetime.datetime | None | Unset: 249 if data is None: 250 return data 251 if isinstance(data, Unset): 252 return data 253 try: 254 if not isinstance(data, str): 255 raise TypeError() 256 auto_stop_time_type_0 = isoparse(data) 257 258 return auto_stop_time_type_0 259 except (TypeError, ValueError, AttributeError, KeyError): 260 pass 261 return cast(datetime.datetime | None | Unset, data) 262 263 auto_stop_time = _parse_auto_stop_time(d.pop("autoStopTime", UNSET)) 264 265 workspace = cls( 266 id=id, 267 name=name, 268 description=description, 269 project_id=project_id, 270 status=status, 271 status_message=status_message, 272 environment_id=environment_id, 273 mounted_datasets=mounted_datasets, 274 compute_config=compute_config, 275 sharing_type=sharing_type, 276 created_by=created_by, 277 created_at=created_at, 278 updated_at=updated_at, 279 auto_stop_timeout=auto_stop_timeout, 280 sessions=sessions, 281 started_at=started_at, 282 auto_stop_time=auto_stop_time, 283 ) 284 285 workspace.additional_properties = d 286 return workspace
@_attrs_define
class WorkspaceComputeConfig:
    """Configuration parameters for a containerized workspace compute environment.

    Attributes:
        container_image_uri (str): Fully qualified container image URI (including registry, repository, and tag).
        cpu (int | Unset): Number of vCPU cores allocated to the workspace. Example: 4.
        memory_gi_b (int | Unset): Memory allocated to the workspace container in GiB. Example: 8.
        volume_size_gi_b (int | Unset): Persistent storage volume size allocated to the workspace in GiB. Example: 50.
        gpu (int | Unset): Number of GPUs allocated to the workspace. Example: 1.
        environment_variables (None | Unset | WorkspaceComputeConfigEnvironmentVariables): Map of environment
            variables injected into the container at runtime. Keys must be non-blank. Example: {'ENV_MODE':
            'production', 'LOG_LEVEL': 'debug'}.
        local_port (int | Unset): User-facing web server port (http). Example: 8080.
    """

    container_image_uri: str
    cpu: int | Unset = UNSET
    memory_gi_b: int | Unset = UNSET
    volume_size_gi_b: int | Unset = UNSET
    gpu: int | Unset = UNSET
    environment_variables: None | Unset | WorkspaceComputeConfigEnvironmentVariables = UNSET
    local_port: int | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-ready dict; UNSET fields are omitted entirely."""
        from ..models.workspace_compute_config_environment_variables import WorkspaceComputeConfigEnvironmentVariables

        env_vars: dict[str, Any] | None | Unset
        if isinstance(self.environment_variables, Unset):
            env_vars = UNSET
        elif isinstance(self.environment_variables, WorkspaceComputeConfigEnvironmentVariables):
            env_vars = self.environment_variables.to_dict()
        else:
            env_vars = self.environment_variables

        # Undeclared extras go in first so the declared fields always win.
        result: dict[str, Any] = dict(self.additional_properties)
        result["containerImageUri"] = self.container_image_uri
        # Optional fields: emit only when not UNSET (None is still emitted).
        for key, value in (
            ("cpu", self.cpu),
            ("memoryGiB", self.memory_gi_b),
            ("volumeSizeGiB", self.volume_size_gi_b),
            ("gpu", self.gpu),
            ("environmentVariables", env_vars),
            ("localPort", self.local_port),
        ):
            if value is not UNSET:
                result[key] = value

        return result

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a decoded JSON mapping; unknown keys are kept."""
        from ..models.workspace_compute_config_environment_variables import WorkspaceComputeConfigEnvironmentVariables

        payload = dict(src_dict)

        def _coerce_env_vars(raw: object) -> None | Unset | WorkspaceComputeConfigEnvironmentVariables:
            # None and the UNSET sentinel pass through untouched.
            if raw is None or isinstance(raw, Unset):
                return raw
            if isinstance(raw, dict):
                try:
                    return WorkspaceComputeConfigEnvironmentVariables.from_dict(raw)
                except (TypeError, ValueError, AttributeError, KeyError):
                    pass
            # Unparseable input is passed through as-is (cast keeps typing happy).
            return cast(None | Unset | WorkspaceComputeConfigEnvironmentVariables, raw)

        config = cls(
            container_image_uri=payload.pop("containerImageUri"),
            cpu=payload.pop("cpu", UNSET),
            memory_gi_b=payload.pop("memoryGiB", UNSET),
            volume_size_gi_b=payload.pop("volumeSizeGiB", UNSET),
            gpu=payload.pop("gpu", UNSET),
            environment_variables=_coerce_env_vars(payload.pop("environmentVariables", UNSET)),
            local_port=payload.pop("localPort", UNSET),
        )
        # Whatever keys remain were not declared on the model.
        config.additional_properties = payload
        return config

    @property
    def additional_keys(self) -> list[str]:
        """Names of all undeclared properties carried on this model."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Configuration parameters for a containerized workspace compute environment.
Attributes:
- container_image_uri (str): Fully qualified container image URI (including registry, repository, and tag).
- cpu (int | Unset): Number of vCPU cores allocated to the workspace. Example: 4.
- memory_gi_b (int | Unset): Memory allocated to the workspace container in GiB. Example: 8.
- volume_size_gi_b (int | Unset): Persistent storage volume size allocated to the workspace in GiB. Example: 50.
- gpu (int | Unset): Number of GPUs allocated to the workspace. Example: 1.
- environment_variables (None | Unset | WorkspaceComputeConfigEnvironmentVariables): Map of environment variables injected into the container at runtime. Keys must be non-blank. Example: {'ENV_MODE': 'production', 'LOG_LEVEL': 'debug'}.
- local_port (int | Unset): User-facing web server port (http). Example: 8080.
30def __init__(self, container_image_uri, cpu=attr_dict['cpu'].default, memory_gi_b=attr_dict['memory_gi_b'].default, volume_size_gi_b=attr_dict['volume_size_gi_b'].default, gpu=attr_dict['gpu'].default, environment_variables=attr_dict['environment_variables'].default, local_port=attr_dict['local_port'].default): 31 self.container_image_uri = container_image_uri 32 self.cpu = cpu 33 self.memory_gi_b = memory_gi_b 34 self.volume_size_gi_b = volume_size_gi_b 35 self.gpu = gpu 36 self.environment_variables = environment_variables 37 self.local_port = local_port 38 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class WorkspaceComputeConfig.
44 def to_dict(self) -> dict[str, Any]: 45 from ..models.workspace_compute_config_environment_variables import WorkspaceComputeConfigEnvironmentVariables 46 47 container_image_uri = self.container_image_uri 48 49 cpu = self.cpu 50 51 memory_gi_b = self.memory_gi_b 52 53 volume_size_gi_b = self.volume_size_gi_b 54 55 gpu = self.gpu 56 57 environment_variables: dict[str, Any] | None | Unset 58 if isinstance(self.environment_variables, Unset): 59 environment_variables = UNSET 60 elif isinstance(self.environment_variables, WorkspaceComputeConfigEnvironmentVariables): 61 environment_variables = self.environment_variables.to_dict() 62 else: 63 environment_variables = self.environment_variables 64 65 local_port = self.local_port 66 67 field_dict: dict[str, Any] = {} 68 field_dict.update(self.additional_properties) 69 field_dict.update( 70 { 71 "containerImageUri": container_image_uri, 72 } 73 ) 74 if cpu is not UNSET: 75 field_dict["cpu"] = cpu 76 if memory_gi_b is not UNSET: 77 field_dict["memoryGiB"] = memory_gi_b 78 if volume_size_gi_b is not UNSET: 79 field_dict["volumeSizeGiB"] = volume_size_gi_b 80 if gpu is not UNSET: 81 field_dict["gpu"] = gpu 82 if environment_variables is not UNSET: 83 field_dict["environmentVariables"] = environment_variables 84 if local_port is not UNSET: 85 field_dict["localPort"] = local_port 86 87 return field_dict
89 @classmethod 90 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 91 from ..models.workspace_compute_config_environment_variables import WorkspaceComputeConfigEnvironmentVariables 92 93 d = dict(src_dict) 94 container_image_uri = d.pop("containerImageUri") 95 96 cpu = d.pop("cpu", UNSET) 97 98 memory_gi_b = d.pop("memoryGiB", UNSET) 99 100 volume_size_gi_b = d.pop("volumeSizeGiB", UNSET) 101 102 gpu = d.pop("gpu", UNSET) 103 104 def _parse_environment_variables(data: object) -> None | Unset | WorkspaceComputeConfigEnvironmentVariables: 105 if data is None: 106 return data 107 if isinstance(data, Unset): 108 return data 109 try: 110 if not isinstance(data, dict): 111 raise TypeError() 112 environment_variables_type_0 = WorkspaceComputeConfigEnvironmentVariables.from_dict(data) 113 114 return environment_variables_type_0 115 except (TypeError, ValueError, AttributeError, KeyError): 116 pass 117 return cast(None | Unset | WorkspaceComputeConfigEnvironmentVariables, data) 118 119 environment_variables = _parse_environment_variables(d.pop("environmentVariables", UNSET)) 120 121 local_port = d.pop("localPort", UNSET) 122 123 workspace_compute_config = cls( 124 container_image_uri=container_image_uri, 125 cpu=cpu, 126 memory_gi_b=memory_gi_b, 127 volume_size_gi_b=volume_size_gi_b, 128 gpu=gpu, 129 environment_variables=environment_variables, 130 local_port=local_port, 131 ) 132 133 workspace_compute_config.additional_properties = d 134 return workspace_compute_config
@_attrs_define
class WorkspaceComputeConfigEnvironmentVariables:
    """Map of environment variables injected into the container at runtime. Keys must be non-blank.

    Example:
        {'ENV_MODE': 'production', 'LOG_LEVEL': 'debug'}

    """

    # No declared fields — every entry lives in additional_properties.
    additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Return a shallow copy of the stored environment-variable mapping."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance directly from a mapping of environment variables."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of all stored environment variables."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> str:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: str) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Map of environment variables injected into the container at runtime. Keys must be non-blank.
Example:
{'ENV_MODE': 'production', 'LOG_LEVEL': 'debug'}
Method generated by attrs for class WorkspaceComputeConfigEnvironmentVariables.
@_attrs_define
class WorkspaceConnectionResponse:
    """
    Attributes:
        connection_url (str):
        expires_at (datetime.datetime):
        message (str):
    """

    connection_url: str
    expires_at: datetime.datetime
    message: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict keyed by the camelCase API field names."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "connectionUrl": self.connection_url,
                # datetimes travel over the wire as ISO-8601 strings
                "expiresAt": self.expires_at.isoformat(),
                "message": self.message,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; unrecognized keys go to additional_properties."""
        data = dict(src_dict)
        model = cls(
            connection_url=data.pop("connectionUrl"),
            expires_at=isoparse(data.pop("expiresAt")),
            message=data.pop("message"),
        )
        model.additional_properties = data
        return model

    @property
    def additional_keys(self) -> list[str]:
        """Keys present in the payload beyond the declared fields."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- connection_url (str):
- expires_at (datetime.datetime):
- message (str):
26def __init__(self, connection_url, expires_at, message): 27 self.connection_url = connection_url 28 self.expires_at = expires_at 29 self.message = message 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class WorkspaceConnectionResponse.
29 def to_dict(self) -> dict[str, Any]: 30 connection_url = self.connection_url 31 32 expires_at = self.expires_at.isoformat() 33 34 message = self.message 35 36 field_dict: dict[str, Any] = {} 37 field_dict.update(self.additional_properties) 38 field_dict.update( 39 { 40 "connectionUrl": connection_url, 41 "expiresAt": expires_at, 42 "message": message, 43 } 44 ) 45 46 return field_dict
48 @classmethod 49 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 50 d = dict(src_dict) 51 connection_url = d.pop("connectionUrl") 52 53 expires_at = isoparse(d.pop("expiresAt")) 54 55 message = d.pop("message") 56 57 workspace_connection_response = cls( 58 connection_url=connection_url, 59 expires_at=expires_at, 60 message=message, 61 ) 62 63 workspace_connection_response.additional_properties = d 64 return workspace_connection_response
@_attrs_define
class WorkspaceEnvironment:
    """
    Attributes:
        id (str):
        name (str):
        description (str):
        category (str):
        default_compute_config (WorkspaceComputeConfig): Configuration parameters for a containerized workspace
            compute environment.
        versions (list[VersionSpecification]):
        owner (str):
    """

    id: str
    name: str
    description: str
    category: str
    default_compute_config: WorkspaceComputeConfig
    versions: list[VersionSpecification]
    owner: str
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict keyed by the camelCase API field names."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "category": self.category,
                # Nested models serialize themselves.
                "defaultComputeConfig": self.default_compute_config.to_dict(),
                "versions": [item.to_dict() for item in self.versions],
                "owner": self.owner,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; unrecognized keys go to additional_properties."""
        from ..models.version_specification import VersionSpecification
        from ..models.workspace_compute_config import WorkspaceComputeConfig

        data = dict(src_dict)
        model = cls(
            id=data.pop("id"),
            name=data.pop("name"),
            description=data.pop("description"),
            category=data.pop("category"),
            default_compute_config=WorkspaceComputeConfig.from_dict(data.pop("defaultComputeConfig")),
            versions=[VersionSpecification.from_dict(item) for item in data.pop("versions")],
            owner=data.pop("owner"),
        )
        model.additional_properties = data
        return model

    @property
    def additional_keys(self) -> list[str]:
        """Keys present in the payload beyond the declared fields."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- name (str):
- description (str):
- category (str):
- default_compute_config (WorkspaceComputeConfig): Configuration parameters for a containerized workspace compute environment.
- versions (list[VersionSpecification]):
- owner (str):
30def __init__(self, id, name, description, category, default_compute_config, versions, owner): 31 self.id = id 32 self.name = name 33 self.description = description 34 self.category = category 35 self.default_compute_config = default_compute_config 36 self.versions = versions 37 self.owner = owner 38 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class WorkspaceEnvironment.
41 def to_dict(self) -> dict[str, Any]: 42 id = self.id 43 44 name = self.name 45 46 description = self.description 47 48 category = self.category 49 50 default_compute_config = self.default_compute_config.to_dict() 51 52 versions = [] 53 for versions_item_data in self.versions: 54 versions_item = versions_item_data.to_dict() 55 versions.append(versions_item) 56 57 owner = self.owner 58 59 field_dict: dict[str, Any] = {} 60 field_dict.update(self.additional_properties) 61 field_dict.update( 62 { 63 "id": id, 64 "name": name, 65 "description": description, 66 "category": category, 67 "defaultComputeConfig": default_compute_config, 68 "versions": versions, 69 "owner": owner, 70 } 71 ) 72 73 return field_dict
75 @classmethod 76 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 77 from ..models.version_specification import VersionSpecification 78 from ..models.workspace_compute_config import WorkspaceComputeConfig 79 80 d = dict(src_dict) 81 id = d.pop("id") 82 83 name = d.pop("name") 84 85 description = d.pop("description") 86 87 category = d.pop("category") 88 89 default_compute_config = WorkspaceComputeConfig.from_dict(d.pop("defaultComputeConfig")) 90 91 versions = [] 92 _versions = d.pop("versions") 93 for versions_item_data in _versions: 94 versions_item = VersionSpecification.from_dict(versions_item_data) 95 96 versions.append(versions_item) 97 98 owner = d.pop("owner") 99 100 workspace_environment = cls( 101 id=id, 102 name=name, 103 description=description, 104 category=category, 105 default_compute_config=default_compute_config, 106 versions=versions, 107 owner=owner, 108 ) 109 110 workspace_environment.additional_properties = d 111 return workspace_environment
@_attrs_define
class WorkspaceInput:
    """
    Attributes:
        name (str): Name of the workspace. Example: my-workspace.
        mounted_datasets (list[MountedDataset]): List of datasets to mount into the workspace.
        compute_config (WorkspaceComputeConfig): Configuration parameters for a containerized workspace compute
            environment.
        sharing_type (SharingType):
        description (str | Unset): Description of the workspace.
        environment_id (None | str | Unset): ID of the predefined workspace environment to use.
        auto_stop_timeout (int | None | Unset): Time period (in hours) to automatically stop the workspace if running
    """

    name: str
    mounted_datasets: list[MountedDataset]
    compute_config: WorkspaceComputeConfig
    sharing_type: SharingType
    description: str | Unset = UNSET
    environment_id: None | str | Unset = UNSET
    auto_stop_timeout: int | None | Unset = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict keyed by the camelCase API field names.

        Optional fields left at UNSET are omitted; an explicit None is kept.
        """
        env_id: None | str | Unset = UNSET if isinstance(self.environment_id, Unset) else self.environment_id
        timeout: int | None | Unset = UNSET if isinstance(self.auto_stop_timeout, Unset) else self.auto_stop_timeout

        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "name": self.name,
                "mountedDatasets": [item.to_dict() for item in self.mounted_datasets],
                "computeConfig": self.compute_config.to_dict(),
                "sharingType": self.sharing_type.value,
            }
        )
        for api_key, value in (
            ("description", self.description),
            ("environmentId", env_id),
            ("autoStopTimeout", timeout),
        ):
            if value is not UNSET:
                serialized[api_key] = value

        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; unrecognized keys go to additional_properties."""
        from ..models.mounted_dataset import MountedDataset
        from ..models.workspace_compute_config import WorkspaceComputeConfig

        data = dict(src_dict)

        def _parse_environment_id(raw: object) -> None | str | Unset:
            # None / UNSET are legitimate members of the union and pass through.
            if raw is None or isinstance(raw, Unset):
                return raw
            return cast(None | str | Unset, raw)

        def _parse_auto_stop_timeout(raw: object) -> int | None | Unset:
            if raw is None or isinstance(raw, Unset):
                return raw
            return cast(int | None | Unset, raw)

        model = cls(
            name=data.pop("name"),
            mounted_datasets=[MountedDataset.from_dict(item) for item in data.pop("mountedDatasets")],
            compute_config=WorkspaceComputeConfig.from_dict(data.pop("computeConfig")),
            sharing_type=SharingType(data.pop("sharingType")),
            description=data.pop("description", UNSET),
            environment_id=_parse_environment_id(data.pop("environmentId", UNSET)),
            auto_stop_timeout=_parse_auto_stop_timeout(data.pop("autoStopTimeout", UNSET)),
        )
        model.additional_properties = data
        return model

    @property
    def additional_keys(self) -> list[str]:
        """Keys present in the payload beyond the declared fields."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- name (str): Name of the workspace. Example: my-workspace.
- mounted_datasets (list[MountedDataset]): List of datasets to mount into the workspace.
- compute_config (WorkspaceComputeConfig): Configuration parameters for a containerized workspace compute environment.
- sharing_type (SharingType):
- description (str | Unset): Description of the workspace.
- environment_id (None | str | Unset): ID of the predefined workspace environment to use.
- auto_stop_timeout (int | None | Unset): Time period (in hours) to automatically stop the workspace if running.
30def __init__(self, name, mounted_datasets, compute_config, sharing_type, description=attr_dict['description'].default, environment_id=attr_dict['environment_id'].default, auto_stop_timeout=attr_dict['auto_stop_timeout'].default): 31 self.name = name 32 self.mounted_datasets = mounted_datasets 33 self.compute_config = compute_config 34 self.sharing_type = sharing_type 35 self.description = description 36 self.environment_id = environment_id 37 self.auto_stop_timeout = auto_stop_timeout 38 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class WorkspaceInput.
44 def to_dict(self) -> dict[str, Any]: 45 name = self.name 46 47 mounted_datasets = [] 48 for mounted_datasets_item_data in self.mounted_datasets: 49 mounted_datasets_item = mounted_datasets_item_data.to_dict() 50 mounted_datasets.append(mounted_datasets_item) 51 52 compute_config = self.compute_config.to_dict() 53 54 sharing_type = self.sharing_type.value 55 56 description = self.description 57 58 environment_id: None | str | Unset 59 if isinstance(self.environment_id, Unset): 60 environment_id = UNSET 61 else: 62 environment_id = self.environment_id 63 64 auto_stop_timeout: int | None | Unset 65 if isinstance(self.auto_stop_timeout, Unset): 66 auto_stop_timeout = UNSET 67 else: 68 auto_stop_timeout = self.auto_stop_timeout 69 70 field_dict: dict[str, Any] = {} 71 field_dict.update(self.additional_properties) 72 field_dict.update( 73 { 74 "name": name, 75 "mountedDatasets": mounted_datasets, 76 "computeConfig": compute_config, 77 "sharingType": sharing_type, 78 } 79 ) 80 if description is not UNSET: 81 field_dict["description"] = description 82 if environment_id is not UNSET: 83 field_dict["environmentId"] = environment_id 84 if auto_stop_timeout is not UNSET: 85 field_dict["autoStopTimeout"] = auto_stop_timeout 86 87 return field_dict
89 @classmethod 90 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 91 from ..models.mounted_dataset import MountedDataset 92 from ..models.workspace_compute_config import WorkspaceComputeConfig 93 94 d = dict(src_dict) 95 name = d.pop("name") 96 97 mounted_datasets = [] 98 _mounted_datasets = d.pop("mountedDatasets") 99 for mounted_datasets_item_data in _mounted_datasets: 100 mounted_datasets_item = MountedDataset.from_dict(mounted_datasets_item_data) 101 102 mounted_datasets.append(mounted_datasets_item) 103 104 compute_config = WorkspaceComputeConfig.from_dict(d.pop("computeConfig")) 105 106 sharing_type = SharingType(d.pop("sharingType")) 107 108 description = d.pop("description", UNSET) 109 110 def _parse_environment_id(data: object) -> None | str | Unset: 111 if data is None: 112 return data 113 if isinstance(data, Unset): 114 return data 115 return cast(None | str | Unset, data) 116 117 environment_id = _parse_environment_id(d.pop("environmentId", UNSET)) 118 119 def _parse_auto_stop_timeout(data: object) -> int | None | Unset: 120 if data is None: 121 return data 122 if isinstance(data, Unset): 123 return data 124 return cast(int | None | Unset, data) 125 126 auto_stop_timeout = _parse_auto_stop_timeout(d.pop("autoStopTimeout", UNSET)) 127 128 workspace_input = cls( 129 name=name, 130 mounted_datasets=mounted_datasets, 131 compute_config=compute_config, 132 sharing_type=sharing_type, 133 description=description, 134 environment_id=environment_id, 135 auto_stop_timeout=auto_stop_timeout, 136 ) 137 138 workspace_input.additional_properties = d 139 return workspace_input
@_attrs_define
class WorkspaceSession:
    """
    Attributes:
        id (str):
        user (str):
        created_at (datetime.datetime):
    """

    id: str
    user: str
    created_at: datetime.datetime
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict keyed by the camelCase API field names."""
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "id": self.id,
                "user": self.user,
                # datetimes travel over the wire as ISO-8601 strings
                "createdAt": self.created_at.isoformat(),
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Deserialize from a mapping; unrecognized keys go to additional_properties."""
        data = dict(src_dict)
        model = cls(
            id=data.pop("id"),
            user=data.pop("user"),
            created_at=isoparse(data.pop("createdAt")),
        )
        model.additional_properties = data
        return model

    @property
    def additional_keys(self) -> list[str]:
        """Keys present in the payload beyond the declared fields."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Attributes:
- id (str):
- user (str):
- created_at (datetime.datetime):
26def __init__(self, id, user, created_at): 27 self.id = id 28 self.user = user 29 self.created_at = created_at 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class WorkspaceSession.
29 def to_dict(self) -> dict[str, Any]: 30 id = self.id 31 32 user = self.user 33 34 created_at = self.created_at.isoformat() 35 36 field_dict: dict[str, Any] = {} 37 field_dict.update(self.additional_properties) 38 field_dict.update( 39 { 40 "id": id, 41 "user": user, 42 "createdAt": created_at, 43 } 44 ) 45 46 return field_dict
48 @classmethod 49 def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: 50 d = dict(src_dict) 51 id = d.pop("id") 52 53 user = d.pop("user") 54 55 created_at = isoparse(d.pop("createdAt")) 56 57 workspace_session = cls( 58 id=id, 59 user=user, 60 created_at=created_at, 61 ) 62 63 workspace_session.additional_properties = d 64 return workspace_session