cirro_api_client.v1.models
Contains all the data models used in inputs/outputs
1"""Contains all the data models used in inputs/outputs""" 2 3from .agent import Agent 4from .agent_detail import AgentDetail 5from .agent_detail_environment_configuration import AgentDetailEnvironmentConfiguration 6from .agent_detail_tags import AgentDetailTags 7from .agent_input import AgentInput 8from .agent_input_configuration_options_schema import AgentInputConfigurationOptionsSchema 9from .agent_input_environment_configuration import AgentInputEnvironmentConfiguration 10from .agent_input_tags import AgentInputTags 11from .agent_registration import AgentRegistration 12from .agent_status import AgentStatus 13from .agent_tags import AgentTags 14from .allowed_data_type import AllowedDataType 15from .approve_project_access_request import ApproveProjectAccessRequest 16from .artifact import Artifact 17from .artifact_type import ArtifactType 18from .audit_event import AuditEvent 19from .audit_event_changes import AuditEventChanges 20from .audit_event_event_detail import AuditEventEventDetail 21from .auth_info import AuthInfo 22from .aws_credentials import AWSCredentials 23from .billing_account import BillingAccount 24from .billing_account_request import BillingAccountRequest 25from .billing_method import BillingMethod 26from .budget_period import BudgetPeriod 27from .calculate_pipeline_cost_request import CalculatePipelineCostRequest 28from .classification_input import ClassificationInput 29from .cloud_account import CloudAccount 30from .cloud_account_type import CloudAccountType 31from .column_definition import ColumnDefinition 32from .compute_environment_configuration import ComputeEnvironmentConfiguration 33from .compute_environment_configuration_input import ComputeEnvironmentConfigurationInput 34from .compute_environment_configuration_input_properties import ComputeEnvironmentConfigurationInputProperties 35from .compute_environment_configuration_properties import ComputeEnvironmentConfigurationProperties 36from .contact import Contact 37from .contact_input 
import ContactInput 38from .create_notebook_instance_request import CreateNotebookInstanceRequest 39from .create_project_access_request import CreateProjectAccessRequest 40from .create_reference_request import CreateReferenceRequest 41from .create_response import CreateResponse 42from .custom_pipeline_settings import CustomPipelineSettings 43from .custom_process_input import CustomProcessInput 44from .customer_type import CustomerType 45from .dashboard import Dashboard 46from .dashboard_dashboard_data import DashboardDashboardData 47from .dashboard_info import DashboardInfo 48from .dashboard_request import DashboardRequest 49from .dashboard_request_dashboard_data import DashboardRequestDashboardData 50from .dashboard_request_info import DashboardRequestInfo 51from .data_file import DataFile 52from .data_file_metadata import DataFileMetadata 53from .dataset import Dataset 54from .dataset_assets_manifest import DatasetAssetsManifest 55from .dataset_condition import DatasetCondition 56from .dataset_condition_field import DatasetConditionField 57from .dataset_detail import DatasetDetail 58from .dataset_detail_info import DatasetDetailInfo 59from .dataset_detail_params import DatasetDetailParams 60from .dataset_detail_source_sample_files_map import DatasetDetailSourceSampleFilesMap 61from .dataset_viz import DatasetViz 62from .discussion import Discussion 63from .discussion_input import DiscussionInput 64from .discussion_type import DiscussionType 65from .entity import Entity 66from .entity_type import EntityType 67from .environment_type import EnvironmentType 68from .error_message import ErrorMessage 69from .executor import Executor 70from .feature_flags import FeatureFlags 71from .file_entry import FileEntry 72from .file_entry_metadata import FileEntryMetadata 73from .file_mapping_rule import FileMappingRule 74from .file_name_match import FileNameMatch 75from .file_name_pattern import FileNamePattern 76from .file_requirements import FileRequirements 77from 
.form_schema import FormSchema 78from .form_schema_form import FormSchemaForm 79from .form_schema_ui import FormSchemaUi 80from .fulfillment_response import FulfillmentResponse 81from .generate_sftp_credentials_request import GenerateSftpCredentialsRequest 82from .get_execution_logs_response import GetExecutionLogsResponse 83from .get_project_summary_response_200 import GetProjectSummaryResponse200 84from .governance_access_type import GovernanceAccessType 85from .governance_classification import GovernanceClassification 86from .governance_contact import GovernanceContact 87from .governance_expiry import GovernanceExpiry 88from .governance_expiry_type import GovernanceExpiryType 89from .governance_file import GovernanceFile 90from .governance_file_access_request import GovernanceFileAccessRequest 91from .governance_file_input import GovernanceFileInput 92from .governance_file_type import GovernanceFileType 93from .governance_requirement import GovernanceRequirement 94from .governance_requirement_project_file_map import GovernanceRequirementProjectFileMap 95from .governance_scope import GovernanceScope 96from .governance_training_verification import GovernanceTrainingVerification 97from .governance_type import GovernanceType 98from .import_data_request import ImportDataRequest 99from .invite_user_request import InviteUserRequest 100from .invite_user_response import InviteUserResponse 101from .list_events_entity_type import ListEventsEntityType 102from .log_entry import LogEntry 103from .login_provider import LoginProvider 104from .message import Message 105from .message_input import MessageInput 106from .message_type import MessageType 107from .metric_record import MetricRecord 108from .metric_record_services import MetricRecordServices 109from .move_dataset_input import MoveDatasetInput 110from .move_dataset_response import MoveDatasetResponse 111from .named_item import NamedItem 112from .notebook_instance import NotebookInstance 113from 
.notebook_instance_status_response import NotebookInstanceStatusResponse 114from .open_notebook_instance_response import OpenNotebookInstanceResponse 115from .paginated_response_dataset_list_dto import PaginatedResponseDatasetListDto 116from .paginated_response_discussion import PaginatedResponseDiscussion 117from .paginated_response_message import PaginatedResponseMessage 118from .paginated_response_sample_dto import PaginatedResponseSampleDto 119from .paginated_response_user_dto import PaginatedResponseUserDto 120from .pipeline_code import PipelineCode 121from .pipeline_cost import PipelineCost 122from .portal_error_response import PortalErrorResponse 123from .process import Process 124from .process_detail import ProcessDetail 125from .project import Project 126from .project_access_request import ProjectAccessRequest 127from .project_access_type import ProjectAccessType 128from .project_create_options import ProjectCreateOptions 129from .project_detail import ProjectDetail 130from .project_file_access_request import ProjectFileAccessRequest 131from .project_input import ProjectInput 132from .project_metrics import ProjectMetrics 133from .project_request import ProjectRequest 134from .project_requirement import ProjectRequirement 135from .project_role import ProjectRole 136from .project_settings import ProjectSettings 137from .project_user import ProjectUser 138from .reference import Reference 139from .reference_type import ReferenceType 140from .reference_type_validation_item import ReferenceTypeValidationItem 141from .repository_type import RepositoryType 142from .request_status import RequestStatus 143from .requirement_fulfillment_input import RequirementFulfillmentInput 144from .requirement_input import RequirementInput 145from .resources_info import ResourcesInfo 146from .run_analysis_request import RunAnalysisRequest 147from .run_analysis_request_params import RunAnalysisRequestParams 148from .run_analysis_request_source_sample_files_map import 
RunAnalysisRequestSourceSampleFilesMap 149from .sample import Sample 150from .sample_metadata import SampleMetadata 151from .sample_request import SampleRequest 152from .sample_request_metadata import SampleRequestMetadata 153from .sample_sheets import SampleSheets 154from .service_connection import ServiceConnection 155from .set_user_project_role_request import SetUserProjectRoleRequest 156from .sftp_credentials import SftpCredentials 157from .share import Share 158from .share_detail import ShareDetail 159from .share_input import ShareInput 160from .share_type import ShareType 161from .sort_order import SortOrder 162from .status import Status 163from .stop_execution_response import StopExecutionResponse 164from .sync_status import SyncStatus 165from .system_info_response import SystemInfoResponse 166from .table import Table 167from .tag import Tag 168from .task import Task 169from .tenant_info import TenantInfo 170from .update_dataset_request import UpdateDatasetRequest 171from .update_user_request import UpdateUserRequest 172from .upload_dataset_create_response import UploadDatasetCreateResponse 173from .upload_dataset_request import UploadDatasetRequest 174from .user import User 175from .user_detail import UserDetail 176from .user_project_assignment import UserProjectAssignment 177from .user_settings import UserSettings 178from .validate_file_name_patterns_request import ValidateFileNamePatternsRequest 179from .validate_file_requirements_request import ValidateFileRequirementsRequest 180 181__all__ = ( 182 "Agent", 183 "AgentDetail", 184 "AgentDetailEnvironmentConfiguration", 185 "AgentDetailTags", 186 "AgentInput", 187 "AgentInputConfigurationOptionsSchema", 188 "AgentInputEnvironmentConfiguration", 189 "AgentInputTags", 190 "AgentRegistration", 191 "AgentStatus", 192 "AgentTags", 193 "AllowedDataType", 194 "ApproveProjectAccessRequest", 195 "Artifact", 196 "ArtifactType", 197 "AuditEvent", 198 "AuditEventChanges", 199 "AuditEventEventDetail", 200 "AuthInfo", 
201 "AWSCredentials", 202 "BillingAccount", 203 "BillingAccountRequest", 204 "BillingMethod", 205 "BudgetPeriod", 206 "CalculatePipelineCostRequest", 207 "ClassificationInput", 208 "CloudAccount", 209 "CloudAccountType", 210 "ColumnDefinition", 211 "ComputeEnvironmentConfiguration", 212 "ComputeEnvironmentConfigurationInput", 213 "ComputeEnvironmentConfigurationInputProperties", 214 "ComputeEnvironmentConfigurationProperties", 215 "Contact", 216 "ContactInput", 217 "CreateNotebookInstanceRequest", 218 "CreateProjectAccessRequest", 219 "CreateReferenceRequest", 220 "CreateResponse", 221 "CustomerType", 222 "CustomPipelineSettings", 223 "CustomProcessInput", 224 "Dashboard", 225 "DashboardDashboardData", 226 "DashboardInfo", 227 "DashboardRequest", 228 "DashboardRequestDashboardData", 229 "DashboardRequestInfo", 230 "DataFile", 231 "DataFileMetadata", 232 "Dataset", 233 "DatasetAssetsManifest", 234 "DatasetCondition", 235 "DatasetConditionField", 236 "DatasetDetail", 237 "DatasetDetailInfo", 238 "DatasetDetailParams", 239 "DatasetDetailSourceSampleFilesMap", 240 "DatasetViz", 241 "Discussion", 242 "DiscussionInput", 243 "DiscussionType", 244 "Entity", 245 "EntityType", 246 "EnvironmentType", 247 "ErrorMessage", 248 "Executor", 249 "FeatureFlags", 250 "FileEntry", 251 "FileEntryMetadata", 252 "FileMappingRule", 253 "FileNameMatch", 254 "FileNamePattern", 255 "FileRequirements", 256 "FormSchema", 257 "FormSchemaForm", 258 "FormSchemaUi", 259 "FulfillmentResponse", 260 "GenerateSftpCredentialsRequest", 261 "GetExecutionLogsResponse", 262 "GetProjectSummaryResponse200", 263 "GovernanceAccessType", 264 "GovernanceClassification", 265 "GovernanceContact", 266 "GovernanceExpiry", 267 "GovernanceExpiryType", 268 "GovernanceFile", 269 "GovernanceFileAccessRequest", 270 "GovernanceFileInput", 271 "GovernanceFileType", 272 "GovernanceRequirement", 273 "GovernanceRequirementProjectFileMap", 274 "GovernanceScope", 275 "GovernanceTrainingVerification", 276 "GovernanceType", 277 
"ImportDataRequest", 278 "InviteUserRequest", 279 "InviteUserResponse", 280 "ListEventsEntityType", 281 "LogEntry", 282 "LoginProvider", 283 "Message", 284 "MessageInput", 285 "MessageType", 286 "MetricRecord", 287 "MetricRecordServices", 288 "MoveDatasetInput", 289 "MoveDatasetResponse", 290 "NamedItem", 291 "NotebookInstance", 292 "NotebookInstanceStatusResponse", 293 "OpenNotebookInstanceResponse", 294 "PaginatedResponseDatasetListDto", 295 "PaginatedResponseDiscussion", 296 "PaginatedResponseMessage", 297 "PaginatedResponseSampleDto", 298 "PaginatedResponseUserDto", 299 "PipelineCode", 300 "PipelineCost", 301 "PortalErrorResponse", 302 "Process", 303 "ProcessDetail", 304 "Project", 305 "ProjectAccessRequest", 306 "ProjectAccessType", 307 "ProjectCreateOptions", 308 "ProjectDetail", 309 "ProjectFileAccessRequest", 310 "ProjectInput", 311 "ProjectMetrics", 312 "ProjectRequest", 313 "ProjectRequirement", 314 "ProjectRole", 315 "ProjectSettings", 316 "ProjectUser", 317 "Reference", 318 "ReferenceType", 319 "ReferenceTypeValidationItem", 320 "RepositoryType", 321 "RequestStatus", 322 "RequirementFulfillmentInput", 323 "RequirementInput", 324 "ResourcesInfo", 325 "RunAnalysisRequest", 326 "RunAnalysisRequestParams", 327 "RunAnalysisRequestSourceSampleFilesMap", 328 "Sample", 329 "SampleMetadata", 330 "SampleRequest", 331 "SampleRequestMetadata", 332 "SampleSheets", 333 "ServiceConnection", 334 "SetUserProjectRoleRequest", 335 "SftpCredentials", 336 "Share", 337 "ShareDetail", 338 "ShareInput", 339 "ShareType", 340 "SortOrder", 341 "Status", 342 "StopExecutionResponse", 343 "SyncStatus", 344 "SystemInfoResponse", 345 "Table", 346 "Tag", 347 "Task", 348 "TenantInfo", 349 "UpdateDatasetRequest", 350 "UpdateUserRequest", 351 "UploadDatasetCreateResponse", 352 "UploadDatasetRequest", 353 "User", 354 "UserDetail", 355 "UserProjectAssignment", 356 "UserSettings", 357 "ValidateFileNamePatternsRequest", 358 "ValidateFileRequirementsRequest", 359)
@_attrs_define
class Agent:
    """Details of the agent

    Attributes:
        status (AgentStatus): The status of the agent
        id (Union[Unset, str]): The unique ID of the agent
        name (Union[Unset, str]): The display name of the agent
        tags (Union[Unset, AgentTags]): Tags associated with the agent
    """

    status: AgentStatus
    id: Union[Unset, str] = UNSET
    name: Union[Unset, str] = UNSET
    tags: Union[Unset, "AgentTags"] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this agent to a plain dict, omitting optional fields left unset."""
        # Start from the extra keys so declared fields overwrite any collision.
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update({"status": self.status.value})
        if self.id is not UNSET:
            serialized["id"] = self.id
        if self.name is not UNSET:
            serialized["name"] = self.name
        if not isinstance(self.tags, Unset):
            serialized["tags"] = self.tags.to_dict()
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an Agent from a plain dict; leftover keys become additional_properties."""
        from ..models.agent_tags import AgentTags

        data = src_dict.copy()

        parsed_status = AgentStatus(data.pop("status"))
        agent_id = data.pop("id", UNSET)
        display_name = data.pop("name", UNSET)

        raw_tags = data.pop("tags", UNSET)
        parsed_tags: Union[Unset, AgentTags]
        parsed_tags = UNSET if isinstance(raw_tags, Unset) else AgentTags.from_dict(raw_tags)

        instance = cls(
            status=parsed_status,
            id=agent_id,
            name=display_name,
            tags=parsed_tags,
        )
        # Whatever was not consumed above is preserved verbatim.
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra keys captured alongside the declared fields."""
        return list(self.additional_properties.keys())
Details of the agent
Attributes:
- status (AgentStatus): The status of the agent
- id (Union[Unset, str]): The unique ID of the agent
- name (Union[Unset, str]): The display name of the agent
- tags (Union[Unset, AgentTags]): Tags associated with the agent
27def __init__(self, status, id=attr_dict['id'].default, name=attr_dict['name'].default, tags=attr_dict['tags'].default): 28 self.status = status 29 self.id = id 30 self.name = name 31 self.tags = tags 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Agent.
34 def to_dict(self) -> Dict[str, Any]: 35 status = self.status.value 36 37 id = self.id 38 39 name = self.name 40 41 tags: Union[Unset, Dict[str, Any]] = UNSET 42 if not isinstance(self.tags, Unset): 43 tags = self.tags.to_dict() 44 45 field_dict: Dict[str, Any] = {} 46 field_dict.update(self.additional_properties) 47 field_dict.update( 48 { 49 "status": status, 50 } 51 ) 52 if id is not UNSET: 53 field_dict["id"] = id 54 if name is not UNSET: 55 field_dict["name"] = name 56 if tags is not UNSET: 57 field_dict["tags"] = tags 58 59 return field_dict
61 @classmethod 62 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 63 from ..models.agent_tags import AgentTags 64 65 d = src_dict.copy() 66 status = AgentStatus(d.pop("status")) 67 68 id = d.pop("id", UNSET) 69 70 name = d.pop("name", UNSET) 71 72 _tags = d.pop("tags", UNSET) 73 tags: Union[Unset, AgentTags] 74 if isinstance(_tags, Unset): 75 tags = UNSET 76 else: 77 tags = AgentTags.from_dict(_tags) 78 79 agent = cls( 80 status=status, 81 id=id, 82 name=name, 83 tags=tags, 84 ) 85 86 agent.additional_properties = d 87 return agent
@_attrs_define
class AgentDetail:
    """
    Attributes:
        id (str):
        name (str):
        agent_role_arn (str):
        status (AgentStatus): The status of the agent
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
        registration (Union['AgentRegistration', None, Unset]):
        tags (Union['AgentDetailTags', None, Unset]):
        environment_configuration (Union['AgentDetailEnvironmentConfiguration', None, Unset]):
    """

    id: str
    name: str
    agent_role_arn: str
    status: AgentStatus
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    registration: Union["AgentRegistration", None, Unset] = UNSET
    tags: Union["AgentDetailTags", None, Unset] = UNSET
    environment_configuration: Union["AgentDetailEnvironmentConfiguration", None, Unset] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; optional fields are omitted when unset."""
        from ..models.agent_detail_environment_configuration import AgentDetailEnvironmentConfiguration
        from ..models.agent_detail_tags import AgentDetailTags
        from ..models.agent_registration import AgentRegistration

        def _dump(value: Any, model: type) -> Union[Dict[str, Any], None, Unset]:
            # Unset passes through, model instances serialize, None/raw values pass through.
            if isinstance(value, Unset):
                return UNSET
            if isinstance(value, model):
                return value.to_dict()
            return value

        # Extra keys first so the declared fields overwrite any collision.
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "id": self.id,
                "name": self.name,
                "agentRoleArn": self.agent_role_arn,
                "status": self.status.value,
                "createdBy": self.created_by,
                "createdAt": self.created_at.isoformat(),
                "updatedAt": self.updated_at.isoformat(),
            }
        )
        registration = _dump(self.registration, AgentRegistration)
        if registration is not UNSET:
            serialized["registration"] = registration
        tags = _dump(self.tags, AgentDetailTags)
        if tags is not UNSET:
            serialized["tags"] = tags
        environment_configuration = _dump(self.environment_configuration, AgentDetailEnvironmentConfiguration)
        if environment_configuration is not UNSET:
            serialized["environmentConfiguration"] = environment_configuration
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a plain dict; unconsumed keys land in additional_properties."""
        from ..models.agent_detail_environment_configuration import AgentDetailEnvironmentConfiguration
        from ..models.agent_detail_tags import AgentDetailTags
        from ..models.agent_registration import AgentRegistration

        data = src_dict.copy()

        def _load(value: object, model: type):
            # None and Unset pass through; dicts are parsed as the model; any
            # other value (or a failed parse) is returned unchanged, matching
            # the generated fallback behavior.
            if value is None or isinstance(value, Unset):
                return value
            try:
                if not isinstance(value, dict):
                    raise TypeError()
                return model.from_dict(value)
            except:  # noqa: E722
                pass
            return value

        # Keyword arguments evaluate left-to-right, so keys are popped in the
        # same order as the original generated code.
        detail = cls(
            id=data.pop("id"),
            name=data.pop("name"),
            agent_role_arn=data.pop("agentRoleArn"),
            status=AgentStatus(data.pop("status")),
            created_by=data.pop("createdBy"),
            created_at=isoparse(data.pop("createdAt")),
            updated_at=isoparse(data.pop("updatedAt")),
            registration=_load(data.pop("registration", UNSET), AgentRegistration),
            tags=_load(data.pop("tags", UNSET), AgentDetailTags),
            environment_configuration=_load(
                data.pop("environmentConfiguration", UNSET), AgentDetailEnvironmentConfiguration
            ),
        )
        detail.additional_properties = data
        return detail

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra keys captured alongside the declared fields."""
        return list(self.additional_properties.keys())
Attributes:
- id (str):
- name (str):
- agent_role_arn (str):
- status (AgentStatus): The status of the agent
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
- registration (Union['AgentRegistration', None, Unset]):
- tags (Union['AgentDetailTags', None, Unset]):
- environment_configuration (Union['AgentDetailEnvironmentConfiguration', None, Unset]):
33def __init__(self, id, name, agent_role_arn, status, created_by, created_at, updated_at, registration=attr_dict['registration'].default, tags=attr_dict['tags'].default, environment_configuration=attr_dict['environment_configuration'].default): 34 self.id = id 35 self.name = name 36 self.agent_role_arn = agent_role_arn 37 self.status = status 38 self.created_by = created_by 39 self.created_at = created_at 40 self.updated_at = updated_at 41 self.registration = registration 42 self.tags = tags 43 self.environment_configuration = environment_configuration 44 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class AgentDetail.
49 def to_dict(self) -> Dict[str, Any]: 50 from ..models.agent_detail_environment_configuration import AgentDetailEnvironmentConfiguration 51 from ..models.agent_detail_tags import AgentDetailTags 52 from ..models.agent_registration import AgentRegistration 53 54 id = self.id 55 56 name = self.name 57 58 agent_role_arn = self.agent_role_arn 59 60 status = self.status.value 61 62 created_by = self.created_by 63 64 created_at = self.created_at.isoformat() 65 66 updated_at = self.updated_at.isoformat() 67 68 registration: Union[Dict[str, Any], None, Unset] 69 if isinstance(self.registration, Unset): 70 registration = UNSET 71 elif isinstance(self.registration, AgentRegistration): 72 registration = self.registration.to_dict() 73 else: 74 registration = self.registration 75 76 tags: Union[Dict[str, Any], None, Unset] 77 if isinstance(self.tags, Unset): 78 tags = UNSET 79 elif isinstance(self.tags, AgentDetailTags): 80 tags = self.tags.to_dict() 81 else: 82 tags = self.tags 83 84 environment_configuration: Union[Dict[str, Any], None, Unset] 85 if isinstance(self.environment_configuration, Unset): 86 environment_configuration = UNSET 87 elif isinstance(self.environment_configuration, AgentDetailEnvironmentConfiguration): 88 environment_configuration = self.environment_configuration.to_dict() 89 else: 90 environment_configuration = self.environment_configuration 91 92 field_dict: Dict[str, Any] = {} 93 field_dict.update(self.additional_properties) 94 field_dict.update( 95 { 96 "id": id, 97 "name": name, 98 "agentRoleArn": agent_role_arn, 99 "status": status, 100 "createdBy": created_by, 101 "createdAt": created_at, 102 "updatedAt": updated_at, 103 } 104 ) 105 if registration is not UNSET: 106 field_dict["registration"] = registration 107 if tags is not UNSET: 108 field_dict["tags"] = tags 109 if environment_configuration is not UNSET: 110 field_dict["environmentConfiguration"] = environment_configuration 111 112 return field_dict
114 @classmethod 115 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 116 from ..models.agent_detail_environment_configuration import AgentDetailEnvironmentConfiguration 117 from ..models.agent_detail_tags import AgentDetailTags 118 from ..models.agent_registration import AgentRegistration 119 120 d = src_dict.copy() 121 id = d.pop("id") 122 123 name = d.pop("name") 124 125 agent_role_arn = d.pop("agentRoleArn") 126 127 status = AgentStatus(d.pop("status")) 128 129 created_by = d.pop("createdBy") 130 131 created_at = isoparse(d.pop("createdAt")) 132 133 updated_at = isoparse(d.pop("updatedAt")) 134 135 def _parse_registration(data: object) -> Union["AgentRegistration", None, Unset]: 136 if data is None: 137 return data 138 if isinstance(data, Unset): 139 return data 140 try: 141 if not isinstance(data, dict): 142 raise TypeError() 143 registration_type_1 = AgentRegistration.from_dict(data) 144 145 return registration_type_1 146 except: # noqa: E722 147 pass 148 return cast(Union["AgentRegistration", None, Unset], data) 149 150 registration = _parse_registration(d.pop("registration", UNSET)) 151 152 def _parse_tags(data: object) -> Union["AgentDetailTags", None, Unset]: 153 if data is None: 154 return data 155 if isinstance(data, Unset): 156 return data 157 try: 158 if not isinstance(data, dict): 159 raise TypeError() 160 tags_type_0 = AgentDetailTags.from_dict(data) 161 162 return tags_type_0 163 except: # noqa: E722 164 pass 165 return cast(Union["AgentDetailTags", None, Unset], data) 166 167 tags = _parse_tags(d.pop("tags", UNSET)) 168 169 def _parse_environment_configuration(data: object) -> Union["AgentDetailEnvironmentConfiguration", None, Unset]: 170 if data is None: 171 return data 172 if isinstance(data, Unset): 173 return data 174 try: 175 if not isinstance(data, dict): 176 raise TypeError() 177 environment_configuration_type_0 = AgentDetailEnvironmentConfiguration.from_dict(data) 178 179 return environment_configuration_type_0 180 except: # 
noqa: E722 181 pass 182 return cast(Union["AgentDetailEnvironmentConfiguration", None, Unset], data) 183 184 environment_configuration = _parse_environment_configuration(d.pop("environmentConfiguration", UNSET)) 185 186 agent_detail = cls( 187 id=id, 188 name=name, 189 agent_role_arn=agent_role_arn, 190 status=status, 191 created_by=created_by, 192 created_at=created_at, 193 updated_at=updated_at, 194 registration=registration, 195 tags=tags, 196 environment_configuration=environment_configuration, 197 ) 198 199 agent_detail.additional_properties = d 200 return agent_detail
@_attrs_define
class AgentDetailEnvironmentConfiguration:
    """Free-form string map; every key/value pair lives in additional_properties."""

    additional_properties: Dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Return a shallow copy of the stored properties."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Capture every key of *src_dict* as an additional property."""
        instance = cls()
        instance.additional_properties = src_dict.copy()
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """All stored property names."""
        return list(self.additional_properties.keys())
Method generated by attrs for class AgentDetailEnvironmentConfiguration.
@_attrs_define
class AgentDetailTags:
    """Free-form string map; every key/value pair lives in additional_properties."""

    additional_properties: Dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Return a shallow copy of the stored tags."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Capture every key of *src_dict* as an additional property."""
        instance = cls()
        instance.additional_properties = src_dict.copy()
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """All stored tag names."""
        return list(self.additional_properties.keys())
@_attrs_define
class AgentInput:
    """Input payload describing an agent to create or update.

    Attributes:
        name (str): The display name of the agent
        agent_role_arn (str): Arn of the AWS IAM role or user that the agent will use (JSONSchema format)
        id (Union[None, Unset, str]): The unique ID of the agent (required on create)
        configuration_options_schema (Union['AgentInputConfigurationOptionsSchema', None, Unset]): The
            configuration options available for the agent
        environment_configuration (Union['AgentInputEnvironmentConfiguration', None, Unset]): The environment
            configuration for the agent Example: {'PARTITION': 'restart'}.
        tags (Union['AgentInputTags', None, Unset]): The tags associated with the agent displayed to the user
            Example: {'Support Email': 'it@company.com'}.
    """

    name: str
    agent_role_arn: str
    id: Union[None, Unset, str] = UNSET
    configuration_options_schema: Union["AgentInputConfigurationOptionsSchema", None, Unset] = UNSET
    environment_configuration: Union["AgentInputEnvironmentConfiguration", None, Unset] = UNSET
    tags: Union["AgentInputTags", None, Unset] = UNSET
    # Extra, non-schema keys round-tripped through to_dict/from_dict.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict with camelCase keys, omitting unset fields."""
        from ..models.agent_input_configuration_options_schema import AgentInputConfigurationOptionsSchema
        from ..models.agent_input_environment_configuration import AgentInputEnvironmentConfiguration
        from ..models.agent_input_tags import AgentInputTags

        def _dump(value: Any, model_cls: type) -> Union[Dict[str, Any], None, Unset]:
            # Nested models expand via their own to_dict; None and raw values pass through.
            if isinstance(value, Unset):
                return UNSET
            if isinstance(value, model_cls):
                return value.to_dict()
            return value

        field_dict: Dict[str, Any] = dict(self.additional_properties)
        field_dict.update(
            {
                "name": self.name,
                "agentRoleArn": self.agent_role_arn,
            }
        )
        optional_entries = (
            ("id", UNSET if isinstance(self.id, Unset) else self.id),
            ("configurationOptionsSchema", _dump(self.configuration_options_schema, AgentInputConfigurationOptionsSchema)),
            ("environmentConfiguration", _dump(self.environment_configuration, AgentInputEnvironmentConfiguration)),
            ("tags", _dump(self.tags, AgentInputTags)),
        )
        for key, value in optional_entries:
            if value is not UNSET:
                field_dict[key] = value

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an AgentInput from a JSON-compatible dict.

        Unrecognized keys are retained in ``additional_properties``.
        """
        from ..models.agent_input_configuration_options_schema import AgentInputConfigurationOptionsSchema
        from ..models.agent_input_environment_configuration import AgentInputEnvironmentConfiguration
        from ..models.agent_input_tags import AgentInputTags

        data = src_dict.copy()

        def _load(raw: object, model_cls: type) -> Any:
            # None/UNSET pass through; dicts are promoted to the model when possible,
            # otherwise the raw value is returned unchanged.
            if raw is None or isinstance(raw, Unset):
                return raw
            if isinstance(raw, dict):
                try:
                    return model_cls.from_dict(raw)
                except:  # noqa: E722 - best-effort promotion, fall back to raw value
                    pass
            return raw

        agent_input = cls(
            name=data.pop("name"),
            agent_role_arn=data.pop("agentRoleArn"),
            id=data.pop("id", UNSET),
            configuration_options_schema=_load(
                data.pop("configurationOptionsSchema", UNSET), AgentInputConfigurationOptionsSchema
            ),
            environment_configuration=_load(
                data.pop("environmentConfiguration", UNSET), AgentInputEnvironmentConfiguration
            ),
            tags=_load(data.pop("tags", UNSET), AgentInputTags),
        )

        agent_input.additional_properties = data
        return agent_input

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra keys carried in ``additional_properties``."""
        return list(self.additional_properties)
Attributes:
- name (str): The display name of the agent
- agent_role_arn (str): ARN of the AWS IAM role or user that the agent will use (JSONSchema format)
- id (Union[None, Unset, str]): The unique ID of the agent (required on create)
- configuration_options_schema (Union['AgentInputConfigurationOptionsSchema', None, Unset]): The configuration options available for the agent
- environment_configuration (Union['AgentInputEnvironmentConfiguration', None, Unset]): The environment configuration for the agent. Example: {'PARTITION': 'restart'}.
- tags (Union['AgentInputTags', None, Unset]): The tags associated with the agent displayed to the user Example: {'Support Email': 'it@company.com'}.
29def __init__(self, name, agent_role_arn, id=attr_dict['id'].default, configuration_options_schema=attr_dict['configuration_options_schema'].default, environment_configuration=attr_dict['environment_configuration'].default, tags=attr_dict['tags'].default): 30 self.name = name 31 self.agent_role_arn = agent_role_arn 32 self.id = id 33 self.configuration_options_schema = configuration_options_schema 34 self.environment_configuration = environment_configuration 35 self.tags = tags 36 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class AgentInput.
41 def to_dict(self) -> Dict[str, Any]: 42 from ..models.agent_input_configuration_options_schema import AgentInputConfigurationOptionsSchema 43 from ..models.agent_input_environment_configuration import AgentInputEnvironmentConfiguration 44 from ..models.agent_input_tags import AgentInputTags 45 46 name = self.name 47 48 agent_role_arn = self.agent_role_arn 49 50 id: Union[None, Unset, str] 51 if isinstance(self.id, Unset): 52 id = UNSET 53 else: 54 id = self.id 55 56 configuration_options_schema: Union[Dict[str, Any], None, Unset] 57 if isinstance(self.configuration_options_schema, Unset): 58 configuration_options_schema = UNSET 59 elif isinstance(self.configuration_options_schema, AgentInputConfigurationOptionsSchema): 60 configuration_options_schema = self.configuration_options_schema.to_dict() 61 else: 62 configuration_options_schema = self.configuration_options_schema 63 64 environment_configuration: Union[Dict[str, Any], None, Unset] 65 if isinstance(self.environment_configuration, Unset): 66 environment_configuration = UNSET 67 elif isinstance(self.environment_configuration, AgentInputEnvironmentConfiguration): 68 environment_configuration = self.environment_configuration.to_dict() 69 else: 70 environment_configuration = self.environment_configuration 71 72 tags: Union[Dict[str, Any], None, Unset] 73 if isinstance(self.tags, Unset): 74 tags = UNSET 75 elif isinstance(self.tags, AgentInputTags): 76 tags = self.tags.to_dict() 77 else: 78 tags = self.tags 79 80 field_dict: Dict[str, Any] = {} 81 field_dict.update(self.additional_properties) 82 field_dict.update( 83 { 84 "name": name, 85 "agentRoleArn": agent_role_arn, 86 } 87 ) 88 if id is not UNSET: 89 field_dict["id"] = id 90 if configuration_options_schema is not UNSET: 91 field_dict["configurationOptionsSchema"] = configuration_options_schema 92 if environment_configuration is not UNSET: 93 field_dict["environmentConfiguration"] = environment_configuration 94 if tags is not UNSET: 95 field_dict["tags"] = 
tags 96 97 return field_dict
99 @classmethod 100 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 101 from ..models.agent_input_configuration_options_schema import AgentInputConfigurationOptionsSchema 102 from ..models.agent_input_environment_configuration import AgentInputEnvironmentConfiguration 103 from ..models.agent_input_tags import AgentInputTags 104 105 d = src_dict.copy() 106 name = d.pop("name") 107 108 agent_role_arn = d.pop("agentRoleArn") 109 110 def _parse_id(data: object) -> Union[None, Unset, str]: 111 if data is None: 112 return data 113 if isinstance(data, Unset): 114 return data 115 return cast(Union[None, Unset, str], data) 116 117 id = _parse_id(d.pop("id", UNSET)) 118 119 def _parse_configuration_options_schema( 120 data: object, 121 ) -> Union["AgentInputConfigurationOptionsSchema", None, Unset]: 122 if data is None: 123 return data 124 if isinstance(data, Unset): 125 return data 126 try: 127 if not isinstance(data, dict): 128 raise TypeError() 129 configuration_options_schema_type_0 = AgentInputConfigurationOptionsSchema.from_dict(data) 130 131 return configuration_options_schema_type_0 132 except: # noqa: E722 133 pass 134 return cast(Union["AgentInputConfigurationOptionsSchema", None, Unset], data) 135 136 configuration_options_schema = _parse_configuration_options_schema(d.pop("configurationOptionsSchema", UNSET)) 137 138 def _parse_environment_configuration(data: object) -> Union["AgentInputEnvironmentConfiguration", None, Unset]: 139 if data is None: 140 return data 141 if isinstance(data, Unset): 142 return data 143 try: 144 if not isinstance(data, dict): 145 raise TypeError() 146 environment_configuration_type_0 = AgentInputEnvironmentConfiguration.from_dict(data) 147 148 return environment_configuration_type_0 149 except: # noqa: E722 150 pass 151 return cast(Union["AgentInputEnvironmentConfiguration", None, Unset], data) 152 153 environment_configuration = _parse_environment_configuration(d.pop("environmentConfiguration", UNSET)) 154 155 def 
_parse_tags(data: object) -> Union["AgentInputTags", None, Unset]: 156 if data is None: 157 return data 158 if isinstance(data, Unset): 159 return data 160 try: 161 if not isinstance(data, dict): 162 raise TypeError() 163 tags_type_0 = AgentInputTags.from_dict(data) 164 165 return tags_type_0 166 except: # noqa: E722 167 pass 168 return cast(Union["AgentInputTags", None, Unset], data) 169 170 tags = _parse_tags(d.pop("tags", UNSET)) 171 172 agent_input = cls( 173 name=name, 174 agent_role_arn=agent_role_arn, 175 id=id, 176 configuration_options_schema=configuration_options_schema, 177 environment_configuration=environment_configuration, 178 tags=tags, 179 ) 180 181 agent_input.additional_properties = d 182 return agent_input
@_attrs_define
class AgentInputConfigurationOptionsSchema:
    """The configuration options available for the agent"""

    # Free-form key/value payload; all keys round-trip through to_dict/from_dict.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Return a shallow copy of every stored property."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance holding a copy of ``src_dict``."""
        schema = cls()
        schema.additional_properties = src_dict.copy()
        return schema

    @property
    def additional_keys(self) -> List[str]:
        """Names of all stored properties."""
        return list(self.additional_properties)
The configuration options available for the agent
Method generated by attrs for class AgentInputConfigurationOptionsSchema.
@_attrs_define
class AgentInputEnvironmentConfiguration:
    """The environment configuration for the agent

    Example:
        {'PARTITION': 'restart'}

    """

    # String-to-string environment settings; all keys round-trip unchanged.
    additional_properties: Dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Return a shallow copy of every stored property."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance holding a copy of ``src_dict``."""
        configuration = cls()
        configuration.additional_properties = src_dict.copy()
        return configuration

    @property
    def additional_keys(self) -> List[str]:
        """Names of all stored properties."""
        return list(self.additional_properties)
The environment configuration for the agent
Example:
{'PARTITION': 'restart'}
Method generated by attrs for class AgentInputEnvironmentConfiguration.
@_attrs_define
class AgentInputTags:
    """The tags associated with the agent displayed to the user

    Example:
        {'Support Email': 'it@company.com'}

    """

    # String-to-string tag map; all keys round-trip unchanged.
    additional_properties: Dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Return a shallow copy of every stored tag."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance holding a copy of ``src_dict``."""
        tags = cls()
        tags.additional_properties = src_dict.copy()
        return tags

    @property
    def additional_keys(self) -> List[str]:
        """Names of all stored tags."""
        return list(self.additional_properties)
The tags associated with the agent displayed to the user
Example:
{'Support Email': 'it@company.com'}
@_attrs_define
class AgentRegistration:
    """Host details reported by an agent when it registers.

    Attributes:
        local_ip (str):
        remote_ip (str):
        agent_version (str):
        hostname (str):
        os (str):
    """

    local_ip: str
    remote_ip: str
    agent_version: str
    hostname: str
    os: str
    # Extra, non-schema keys round-tripped through to_dict/from_dict.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict with camelCase keys."""
        field_dict: Dict[str, Any] = dict(self.additional_properties)
        field_dict.update(
            {
                "localIp": self.local_ip,
                "remoteIp": self.remote_ip,
                "agentVersion": self.agent_version,
                "hostname": self.hostname,
                "os": self.os,
            }
        )
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an AgentRegistration from a JSON-compatible dict."""
        data = src_dict.copy()
        registration = cls(
            local_ip=data.pop("localIp"),
            remote_ip=data.pop("remoteIp"),
            agent_version=data.pop("agentVersion"),
            hostname=data.pop("hostname"),
            os=data.pop("os"),
        )
        registration.additional_properties = data
        return registration

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra keys carried in ``additional_properties``."""
        return list(self.additional_properties)
Attributes:
- local_ip (str):
- remote_ip (str):
- agent_version (str):
- hostname (str):
- os (str):
28def __init__(self, local_ip, remote_ip, agent_version, hostname, os): 29 self.local_ip = local_ip 30 self.remote_ip = remote_ip 31 self.agent_version = agent_version 32 self.hostname = hostname 33 self.os = os 34 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class AgentRegistration.
28 def to_dict(self) -> Dict[str, Any]: 29 local_ip = self.local_ip 30 31 remote_ip = self.remote_ip 32 33 agent_version = self.agent_version 34 35 hostname = self.hostname 36 37 os = self.os 38 39 field_dict: Dict[str, Any] = {} 40 field_dict.update(self.additional_properties) 41 field_dict.update( 42 { 43 "localIp": local_ip, 44 "remoteIp": remote_ip, 45 "agentVersion": agent_version, 46 "hostname": hostname, 47 "os": os, 48 } 49 ) 50 51 return field_dict
53 @classmethod 54 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 55 d = src_dict.copy() 56 local_ip = d.pop("localIp") 57 58 remote_ip = d.pop("remoteIp") 59 60 agent_version = d.pop("agentVersion") 61 62 hostname = d.pop("hostname") 63 64 os = d.pop("os") 65 66 agent_registration = cls( 67 local_ip=local_ip, 68 remote_ip=remote_ip, 69 agent_version=agent_version, 70 hostname=hostname, 71 os=os, 72 ) 73 74 agent_registration.additional_properties = d 75 return agent_registration
class AgentStatus(str, Enum):
    """Connectivity state of an agent."""

    OFFLINE = "OFFLINE"
    ONLINE = "ONLINE"
    # Fallback for values this client does not recognize; never send it in requests.
    UNKNOWN = "UNKNOWN"

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Any unrecognized value resolves to the UNKNOWN fallback member.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class AgentTags:
    """Tags associated with the agent"""

    # String-to-string tag map; all keys round-trip unchanged.
    additional_properties: Dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Return a shallow copy of every stored tag."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance holding a copy of ``src_dict``."""
        tags = cls()
        tags.additional_properties = src_dict.copy()
        return tags

    @property
    def additional_keys(self) -> List[str]:
        """Names of all stored tags."""
        return list(self.additional_properties)
Tags associated with the agent
@_attrs_define
class AllowedDataType:
    """A data type together with the file-name patterns it permits.

    Attributes:
        description (str):
        error_msg (str):
        allowed_patterns (List['FileNamePattern']):
    """

    description: str
    error_msg: str
    allowed_patterns: List["FileNamePattern"]
    # Extra, non-schema keys round-tripped through to_dict/from_dict.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict with camelCase keys."""
        field_dict: Dict[str, Any] = dict(self.additional_properties)
        field_dict.update(
            {
                "description": self.description,
                "errorMsg": self.error_msg,
                "allowedPatterns": [pattern.to_dict() for pattern in self.allowed_patterns],
            }
        )
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an AllowedDataType from a JSON-compatible dict."""
        from ..models.file_name_pattern import FileNamePattern

        data = src_dict.copy()
        allowed_data_type = cls(
            description=data.pop("description"),
            error_msg=data.pop("errorMsg"),
            allowed_patterns=[FileNamePattern.from_dict(item) for item in data.pop("allowedPatterns")],
        )
        allowed_data_type.additional_properties = data
        return allowed_data_type

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra keys carried in ``additional_properties``."""
        return list(self.additional_properties)
Attributes:
- description (str):
- error_msg (str):
- allowed_patterns (List['FileNamePattern']):
26def __init__(self, description, error_msg, allowed_patterns): 27 self.description = description 28 self.error_msg = error_msg 29 self.allowed_patterns = allowed_patterns 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class AllowedDataType.
28 def to_dict(self) -> Dict[str, Any]: 29 description = self.description 30 31 error_msg = self.error_msg 32 33 allowed_patterns = [] 34 for allowed_patterns_item_data in self.allowed_patterns: 35 allowed_patterns_item = allowed_patterns_item_data.to_dict() 36 allowed_patterns.append(allowed_patterns_item) 37 38 field_dict: Dict[str, Any] = {} 39 field_dict.update(self.additional_properties) 40 field_dict.update( 41 { 42 "description": description, 43 "errorMsg": error_msg, 44 "allowedPatterns": allowed_patterns, 45 } 46 ) 47 48 return field_dict
50 @classmethod 51 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 52 from ..models.file_name_pattern import FileNamePattern 53 54 d = src_dict.copy() 55 description = d.pop("description") 56 57 error_msg = d.pop("errorMsg") 58 59 allowed_patterns = [] 60 _allowed_patterns = d.pop("allowedPatterns") 61 for allowed_patterns_item_data in _allowed_patterns: 62 allowed_patterns_item = FileNamePattern.from_dict(allowed_patterns_item_data) 63 64 allowed_patterns.append(allowed_patterns_item) 65 66 allowed_data_type = cls( 67 description=description, 68 error_msg=error_msg, 69 allowed_patterns=allowed_patterns, 70 ) 71 72 allowed_data_type.additional_properties = d 73 return allowed_data_type
@_attrs_define
class ApproveProjectAccessRequest:
    """Request body for approving a project access request.

    Attributes:
        role (ProjectRole):
    """

    role: ProjectRole
    # Extra, non-schema keys round-tripped through to_dict/from_dict.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict (the role is emitted as its enum value)."""
        field_dict: Dict[str, Any] = dict(self.additional_properties)
        field_dict["role"] = self.role.value
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an ApproveProjectAccessRequest from a JSON-compatible dict."""
        data = src_dict.copy()
        request = cls(role=ProjectRole(data.pop("role")))
        request.additional_properties = data
        return request

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra keys carried in ``additional_properties``."""
        return list(self.additional_properties)
Attributes:
- role (ProjectRole):
24def __init__(self, role): 25 self.role = role 26 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ApproveProjectAccessRequest.
35 @classmethod 36 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 37 d = src_dict.copy() 38 role = ProjectRole(d.pop("role")) 39 40 approve_project_access_request = cls( 41 role=role, 42 ) 43 44 approve_project_access_request.additional_properties = d 45 return approve_project_access_request
@_attrs_define
class Artifact:
    """A secondary file or resource associated with a dataset

    Attributes:
        type (ArtifactType):
        path (str):
    """

    type: ArtifactType
    path: str
    # Extra, non-schema keys round-tripped through to_dict/from_dict.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict (the type is emitted as its enum value)."""
        field_dict: Dict[str, Any] = dict(self.additional_properties)
        field_dict.update(
            {
                "type": self.type.value,
                "path": self.path,
            }
        )
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an Artifact from a JSON-compatible dict."""
        data = src_dict.copy()
        artifact = cls(
            type=ArtifactType(data.pop("type")),
            path=data.pop("path"),
        )
        artifact.additional_properties = data
        return artifact

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra keys carried in ``additional_properties``."""
        return list(self.additional_properties)
A secondary file or resource associated with a dataset
Attributes:
- type (ArtifactType):
- path (str):
25def __init__(self, type, path): 26 self.type = type 27 self.path = path 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Artifact.
class ArtifactType(str, Enum):
    """Kinds of secondary files or resources attached to a dataset."""

    FILES = "FILES"  # Files used in the workflow
    INGEST_MANIFEST = "INGEST_MANIFEST"  # Files expected to upload
    METADATA = "METADATA"  # Snapshot of metadata at the time of execution
    SAMPLE_SHEET = "SAMPLE_SHEET"  # Samples used in the workflow
    WORKFLOW_COMPUTE_CONFIG = "WORKFLOW_COMPUTE_CONFIG"  # Compute overrides used in the workflow
    WORKFLOW_DAG = "WORKFLOW_DAG"  # Directed acyclic graph of workflow execution
    WORKFLOW_DEBUG_LOGS = "WORKFLOW_DEBUG_LOGS"  # Debug logs from workflow engine
    WORKFLOW_LOGS = "WORKFLOW_LOGS"  # Logs from workflow engine
    WORKFLOW_OPTIONS = "WORKFLOW_OPTIONS"  # Options used in the workflow
    WORKFLOW_PARAMETERS = "WORKFLOW_PARAMETERS"  # Parameters used in the workflow
    WORKFLOW_REPORT = "WORKFLOW_REPORT"  # Execution report from workflow engine
    WORKFLOW_TIMELINE = "WORKFLOW_TIMELINE"  # Timeline of workflow execution
    WORKFLOW_TRACE = "WORKFLOW_TRACE"  # Trace of workflow execution
    # Fallback for values this client does not recognize; never send it in requests.
    UNKNOWN = "UNKNOWN"

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Any unrecognized value resolves to the UNKNOWN fallback member.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
Compute overrides used in the workflow
Directed acyclic graph of workflow execution
Debug logs from workflow engine
Parameters used in the workflow
Execution report from workflow engine
Timeline of workflow execution
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class AuditEvent:
    """A recorded audit-trail event.

    Attributes:
        id (Union[Unset, str]): The unique identifier for the audit event
        event_type (Union[Unset, str]): The type of event Example: CREATE.
        project_id (Union[Unset, str]): The project ID associated with the event (if applicable)
        entity_id (Union[Unset, str]): The entity ID associated with the event
        entity_type (Union[Unset, str]): The entity type associated with the event Example: Project.
        event_detail (Union['AuditEventEventDetail', None, Unset]): The details of the event, such as the request
            details sent from the client
        changes (Union['AuditEventChanges', None, Unset]): The changes made to the entity (if applicable) Example:
            {'.settings.retentionPolicyDays': '1 -> 2'}.
        username (Union[Unset, str]): The username of the user who performed the action Example: admin@cirro.bio.
        ip_address (Union[Unset, str]): The IP address of the user who performed the action Example: 0.0.0.0.
        created_at (Union[Unset, datetime.datetime]): The date and time the event was created
    """

    id: Union[Unset, str] = UNSET
    event_type: Union[Unset, str] = UNSET
    project_id: Union[Unset, str] = UNSET
    entity_id: Union[Unset, str] = UNSET
    entity_type: Union[Unset, str] = UNSET
    event_detail: Union["AuditEventEventDetail", None, Unset] = UNSET
    changes: Union["AuditEventChanges", None, Unset] = UNSET
    username: Union[Unset, str] = UNSET
    ip_address: Union[Unset, str] = UNSET
    created_at: Union[Unset, datetime.datetime] = UNSET
    # Extra, non-schema keys round-tripped through to_dict/from_dict.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict with camelCase keys, omitting unset fields."""
        from ..models.audit_event_changes import AuditEventChanges
        from ..models.audit_event_event_detail import AuditEventEventDetail

        def _dump(value: Any, model_cls: type) -> Union[Dict[str, Any], None, Unset]:
            # Nested models expand via their own to_dict; None and raw values pass through.
            if isinstance(value, Unset):
                return UNSET
            if isinstance(value, model_cls):
                return value.to_dict()
            return value

        created_at_raw: Union[Unset, str] = UNSET
        if not isinstance(self.created_at, Unset):
            created_at_raw = self.created_at.isoformat()

        field_dict: Dict[str, Any] = dict(self.additional_properties)
        optional_entries = (
            ("id", self.id),
            ("eventType", self.event_type),
            ("projectId", self.project_id),
            ("entityId", self.entity_id),
            ("entityType", self.entity_type),
            ("eventDetail", _dump(self.event_detail, AuditEventEventDetail)),
            ("changes", _dump(self.changes, AuditEventChanges)),
            ("username", self.username),
            ("ipAddress", self.ip_address),
            ("createdAt", created_at_raw),
        )
        for key, value in optional_entries:
            if value is not UNSET:
                field_dict[key] = value

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an AuditEvent from a JSON-compatible dict.

        Unrecognized keys are retained in ``additional_properties``.
        """
        from ..models.audit_event_changes import AuditEventChanges
        from ..models.audit_event_event_detail import AuditEventEventDetail

        data = src_dict.copy()

        def _load(raw: object, model_cls: type) -> Any:
            # None/UNSET pass through; dicts are promoted to the model when possible,
            # otherwise the raw value is returned unchanged.
            if raw is None or isinstance(raw, Unset):
                return raw
            if isinstance(raw, dict):
                try:
                    return model_cls.from_dict(raw)
                except:  # noqa: E722 - best-effort promotion, fall back to raw value
                    pass
            return raw

        raw_created_at = data.pop("createdAt", UNSET)
        created_at: Union[Unset, datetime.datetime]
        if isinstance(raw_created_at, Unset):
            created_at = UNSET
        else:
            created_at = isoparse(raw_created_at)

        audit_event = cls(
            id=data.pop("id", UNSET),
            event_type=data.pop("eventType", UNSET),
            project_id=data.pop("projectId", UNSET),
            entity_id=data.pop("entityId", UNSET),
            entity_type=data.pop("entityType", UNSET),
            event_detail=_load(data.pop("eventDetail", UNSET), AuditEventEventDetail),
            changes=_load(data.pop("changes", UNSET), AuditEventChanges),
            username=data.pop("username", UNSET),
            ip_address=data.pop("ipAddress", UNSET),
            created_at=created_at,
        )

        audit_event.additional_properties = data
        return audit_event

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra keys carried in ``additional_properties``."""
        return list(self.additional_properties)
Attributes:
- id (Union[Unset, str]): The unique identifier for the audit event
- event_type (Union[Unset, str]): The type of event Example: CREATE.
- project_id (Union[Unset, str]): The project ID associated with the event (if applicable)
- entity_id (Union[Unset, str]): The entity ID associated with the event
- entity_type (Union[Unset, str]): The entity type associated with the event Example: Project.
- event_detail (Union['AuditEventEventDetail', None, Unset]): The details of the event, such as the request details sent from the client
- changes (Union['AuditEventChanges', None, Unset]): The changes made to the entity (if applicable) Example: {'.settings.retentionPolicyDays': '1 -> 2'}.
- username (Union[Unset, str]): The username of the user who performed the action Example: admin@cirro.bio.
- ip_address (Union[Unset, str]): The IP address of the user who performed the action Example: 0.0.0.0.
- created_at (Union[Unset, datetime.datetime]): The date and time the event was created
33def __init__(self, id=attr_dict['id'].default, event_type=attr_dict['event_type'].default, project_id=attr_dict['project_id'].default, entity_id=attr_dict['entity_id'].default, entity_type=attr_dict['entity_type'].default, event_detail=attr_dict['event_detail'].default, changes=attr_dict['changes'].default, username=attr_dict['username'].default, ip_address=attr_dict['ip_address'].default, created_at=attr_dict['created_at'].default): 34 self.id = id 35 self.event_type = event_type 36 self.project_id = project_id 37 self.entity_id = entity_id 38 self.entity_type = entity_type 39 self.event_detail = event_detail 40 self.changes = changes 41 self.username = username 42 self.ip_address = ip_address 43 self.created_at = created_at 44 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class AuditEvent.
49 def to_dict(self) -> Dict[str, Any]: 50 from ..models.audit_event_changes import AuditEventChanges 51 from ..models.audit_event_event_detail import AuditEventEventDetail 52 53 id = self.id 54 55 event_type = self.event_type 56 57 project_id = self.project_id 58 59 entity_id = self.entity_id 60 61 entity_type = self.entity_type 62 63 event_detail: Union[Dict[str, Any], None, Unset] 64 if isinstance(self.event_detail, Unset): 65 event_detail = UNSET 66 elif isinstance(self.event_detail, AuditEventEventDetail): 67 event_detail = self.event_detail.to_dict() 68 else: 69 event_detail = self.event_detail 70 71 changes: Union[Dict[str, Any], None, Unset] 72 if isinstance(self.changes, Unset): 73 changes = UNSET 74 elif isinstance(self.changes, AuditEventChanges): 75 changes = self.changes.to_dict() 76 else: 77 changes = self.changes 78 79 username = self.username 80 81 ip_address = self.ip_address 82 83 created_at: Union[Unset, str] = UNSET 84 if not isinstance(self.created_at, Unset): 85 created_at = self.created_at.isoformat() 86 87 field_dict: Dict[str, Any] = {} 88 field_dict.update(self.additional_properties) 89 field_dict.update({}) 90 if id is not UNSET: 91 field_dict["id"] = id 92 if event_type is not UNSET: 93 field_dict["eventType"] = event_type 94 if project_id is not UNSET: 95 field_dict["projectId"] = project_id 96 if entity_id is not UNSET: 97 field_dict["entityId"] = entity_id 98 if entity_type is not UNSET: 99 field_dict["entityType"] = entity_type 100 if event_detail is not UNSET: 101 field_dict["eventDetail"] = event_detail 102 if changes is not UNSET: 103 field_dict["changes"] = changes 104 if username is not UNSET: 105 field_dict["username"] = username 106 if ip_address is not UNSET: 107 field_dict["ipAddress"] = ip_address 108 if created_at is not UNSET: 109 field_dict["createdAt"] = created_at 110 111 return field_dict
113 @classmethod 114 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 115 from ..models.audit_event_changes import AuditEventChanges 116 from ..models.audit_event_event_detail import AuditEventEventDetail 117 118 d = src_dict.copy() 119 id = d.pop("id", UNSET) 120 121 event_type = d.pop("eventType", UNSET) 122 123 project_id = d.pop("projectId", UNSET) 124 125 entity_id = d.pop("entityId", UNSET) 126 127 entity_type = d.pop("entityType", UNSET) 128 129 def _parse_event_detail(data: object) -> Union["AuditEventEventDetail", None, Unset]: 130 if data is None: 131 return data 132 if isinstance(data, Unset): 133 return data 134 try: 135 if not isinstance(data, dict): 136 raise TypeError() 137 event_detail_type_0 = AuditEventEventDetail.from_dict(data) 138 139 return event_detail_type_0 140 except: # noqa: E722 141 pass 142 return cast(Union["AuditEventEventDetail", None, Unset], data) 143 144 event_detail = _parse_event_detail(d.pop("eventDetail", UNSET)) 145 146 def _parse_changes(data: object) -> Union["AuditEventChanges", None, Unset]: 147 if data is None: 148 return data 149 if isinstance(data, Unset): 150 return data 151 try: 152 if not isinstance(data, dict): 153 raise TypeError() 154 changes_type_0 = AuditEventChanges.from_dict(data) 155 156 return changes_type_0 157 except: # noqa: E722 158 pass 159 return cast(Union["AuditEventChanges", None, Unset], data) 160 161 changes = _parse_changes(d.pop("changes", UNSET)) 162 163 username = d.pop("username", UNSET) 164 165 ip_address = d.pop("ipAddress", UNSET) 166 167 _created_at = d.pop("createdAt", UNSET) 168 created_at: Union[Unset, datetime.datetime] 169 if isinstance(_created_at, Unset): 170 created_at = UNSET 171 else: 172 created_at = isoparse(_created_at) 173 174 audit_event = cls( 175 id=id, 176 event_type=event_type, 177 project_id=project_id, 178 entity_id=entity_id, 179 entity_type=entity_type, 180 event_detail=event_detail, 181 changes=changes, 182 username=username, 183 ip_address=ip_address, 
184 created_at=created_at, 185 ) 186 187 audit_event.additional_properties = d 188 return audit_event
10@_attrs_define 11class AuditEventChanges: 12 """The changes made to the entity (if applicable) 13 14 Example: 15 {'.settings.retentionPolicyDays': '1 -> 2'} 16 17 """ 18 19 additional_properties: Dict[str, str] = _attrs_field(init=False, factory=dict) 20 21 def to_dict(self) -> Dict[str, Any]: 22 field_dict: Dict[str, Any] = {} 23 field_dict.update(self.additional_properties) 24 field_dict.update({}) 25 26 return field_dict 27 28 @classmethod 29 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 30 d = src_dict.copy() 31 audit_event_changes = cls() 32 33 audit_event_changes.additional_properties = d 34 return audit_event_changes 35 36 @property 37 def additional_keys(self) -> List[str]: 38 return list(self.additional_properties.keys())
The changes made to the entity (if applicable)
Example:
{'.settings.retentionPolicyDays': '1 -> 2'}
10@_attrs_define 11class AuditEventEventDetail: 12 """The details of the event, such as the request details sent from the client""" 13 14 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 15 16 def to_dict(self) -> Dict[str, Any]: 17 field_dict: Dict[str, Any] = {} 18 field_dict.update(self.additional_properties) 19 field_dict.update({}) 20 21 return field_dict 22 23 @classmethod 24 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 25 d = src_dict.copy() 26 audit_event_event_detail = cls() 27 28 audit_event_event_detail.additional_properties = d 29 return audit_event_event_detail 30 31 @property 32 def additional_keys(self) -> List[str]: 33 return list(self.additional_properties.keys())
The details of the event, such as the request details sent from the client
10@_attrs_define 11class AuthInfo: 12 """ 13 Attributes: 14 user_pool_id (str): 15 sdk_app_id (str): 16 ui_app_id (str): 17 drive_app_id (str): 18 endpoint (str): 19 """ 20 21 user_pool_id: str 22 sdk_app_id: str 23 ui_app_id: str 24 drive_app_id: str 25 endpoint: str 26 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 27 28 def to_dict(self) -> Dict[str, Any]: 29 user_pool_id = self.user_pool_id 30 31 sdk_app_id = self.sdk_app_id 32 33 ui_app_id = self.ui_app_id 34 35 drive_app_id = self.drive_app_id 36 37 endpoint = self.endpoint 38 39 field_dict: Dict[str, Any] = {} 40 field_dict.update(self.additional_properties) 41 field_dict.update( 42 { 43 "userPoolId": user_pool_id, 44 "sdkAppId": sdk_app_id, 45 "uiAppId": ui_app_id, 46 "driveAppId": drive_app_id, 47 "endpoint": endpoint, 48 } 49 ) 50 51 return field_dict 52 53 @classmethod 54 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 55 d = src_dict.copy() 56 user_pool_id = d.pop("userPoolId") 57 58 sdk_app_id = d.pop("sdkAppId") 59 60 ui_app_id = d.pop("uiAppId") 61 62 drive_app_id = d.pop("driveAppId") 63 64 endpoint = d.pop("endpoint") 65 66 auth_info = cls( 67 user_pool_id=user_pool_id, 68 sdk_app_id=sdk_app_id, 69 ui_app_id=ui_app_id, 70 drive_app_id=drive_app_id, 71 endpoint=endpoint, 72 ) 73 74 auth_info.additional_properties = d 75 return auth_info 76 77 @property 78 def additional_keys(self) -> List[str]: 79 return list(self.additional_properties.keys())
Attributes:
- user_pool_id (str):
- sdk_app_id (str):
- ui_app_id (str):
- drive_app_id (str):
- endpoint (str):
28def __init__(self, user_pool_id, sdk_app_id, ui_app_id, drive_app_id, endpoint): 29 self.user_pool_id = user_pool_id 30 self.sdk_app_id = sdk_app_id 31 self.ui_app_id = ui_app_id 32 self.drive_app_id = drive_app_id 33 self.endpoint = endpoint 34 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class AuthInfo.
28 def to_dict(self) -> Dict[str, Any]: 29 user_pool_id = self.user_pool_id 30 31 sdk_app_id = self.sdk_app_id 32 33 ui_app_id = self.ui_app_id 34 35 drive_app_id = self.drive_app_id 36 37 endpoint = self.endpoint 38 39 field_dict: Dict[str, Any] = {} 40 field_dict.update(self.additional_properties) 41 field_dict.update( 42 { 43 "userPoolId": user_pool_id, 44 "sdkAppId": sdk_app_id, 45 "uiAppId": ui_app_id, 46 "driveAppId": drive_app_id, 47 "endpoint": endpoint, 48 } 49 ) 50 51 return field_dict
53 @classmethod 54 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 55 d = src_dict.copy() 56 user_pool_id = d.pop("userPoolId") 57 58 sdk_app_id = d.pop("sdkAppId") 59 60 ui_app_id = d.pop("uiAppId") 61 62 drive_app_id = d.pop("driveAppId") 63 64 endpoint = d.pop("endpoint") 65 66 auth_info = cls( 67 user_pool_id=user_pool_id, 68 sdk_app_id=sdk_app_id, 69 ui_app_id=ui_app_id, 70 drive_app_id=drive_app_id, 71 endpoint=endpoint, 72 ) 73 74 auth_info.additional_properties = d 75 return auth_info
14@_attrs_define 15class AWSCredentials: 16 """ 17 Attributes: 18 access_key_id (str): 19 secret_access_key (str): 20 session_token (str): 21 expiration (datetime.datetime): 22 region (Union[Unset, str]): Region of requested resource (i.e., S3 Bucket) 23 """ 24 25 access_key_id: str 26 secret_access_key: str 27 session_token: str 28 expiration: datetime.datetime 29 region: Union[Unset, str] = UNSET 30 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 31 32 def to_dict(self) -> Dict[str, Any]: 33 access_key_id = self.access_key_id 34 35 secret_access_key = self.secret_access_key 36 37 session_token = self.session_token 38 39 expiration = self.expiration.isoformat() 40 41 region = self.region 42 43 field_dict: Dict[str, Any] = {} 44 field_dict.update(self.additional_properties) 45 field_dict.update( 46 { 47 "accessKeyId": access_key_id, 48 "secretAccessKey": secret_access_key, 49 "sessionToken": session_token, 50 "expiration": expiration, 51 } 52 ) 53 if region is not UNSET: 54 field_dict["region"] = region 55 56 return field_dict 57 58 @classmethod 59 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 60 d = src_dict.copy() 61 access_key_id = d.pop("accessKeyId") 62 63 secret_access_key = d.pop("secretAccessKey") 64 65 session_token = d.pop("sessionToken") 66 67 expiration = isoparse(d.pop("expiration")) 68 69 region = d.pop("region", UNSET) 70 71 aws_credentials = cls( 72 access_key_id=access_key_id, 73 secret_access_key=secret_access_key, 74 session_token=session_token, 75 expiration=expiration, 76 region=region, 77 ) 78 79 aws_credentials.additional_properties = d 80 return aws_credentials 81 82 @property 83 def additional_keys(self) -> List[str]: 84 return list(self.additional_properties.keys())
Attributes:
- access_key_id (str):
- secret_access_key (str):
- session_token (str):
- expiration (datetime.datetime):
- region (Union[Unset, str]): Region of requested resource (i.e., S3 Bucket)
28def __init__(self, access_key_id, secret_access_key, session_token, expiration, region=attr_dict['region'].default): 29 self.access_key_id = access_key_id 30 self.secret_access_key = secret_access_key 31 self.session_token = session_token 32 self.expiration = expiration 33 self.region = region 34 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class AWSCredentials.
32 def to_dict(self) -> Dict[str, Any]: 33 access_key_id = self.access_key_id 34 35 secret_access_key = self.secret_access_key 36 37 session_token = self.session_token 38 39 expiration = self.expiration.isoformat() 40 41 region = self.region 42 43 field_dict: Dict[str, Any] = {} 44 field_dict.update(self.additional_properties) 45 field_dict.update( 46 { 47 "accessKeyId": access_key_id, 48 "secretAccessKey": secret_access_key, 49 "sessionToken": session_token, 50 "expiration": expiration, 51 } 52 ) 53 if region is not UNSET: 54 field_dict["region"] = region 55 56 return field_dict
58 @classmethod 59 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 60 d = src_dict.copy() 61 access_key_id = d.pop("accessKeyId") 62 63 secret_access_key = d.pop("secretAccessKey") 64 65 session_token = d.pop("sessionToken") 66 67 expiration = isoparse(d.pop("expiration")) 68 69 region = d.pop("region", UNSET) 70 71 aws_credentials = cls( 72 access_key_id=access_key_id, 73 secret_access_key=secret_access_key, 74 session_token=session_token, 75 expiration=expiration, 76 region=region, 77 ) 78 79 aws_credentials.additional_properties = d 80 return aws_credentials
@_attrs_define
class BillingAccount:
    """
    Attributes:
        id (str):
        name (str):
        organization (str):
        contacts (List['Contact']):
        customer_type (CustomerType):
        billing_method (BillingMethod):
        primary_budget_number (str):
        owner (str):
        shared_with (List[str]):
        is_archived (bool):
    """

    id: str
    name: str
    organization: str
    contacts: List["Contact"]
    customer_type: CustomerType
    billing_method: BillingMethod
    primary_budget_number: str
    owner: str
    shared_with: List[str]
    is_archived: bool
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to the camelCase wire format, extra properties included."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "id": self.id,
                "name": self.name,
                "organization": self.organization,
                # Nested models serialize through their own to_dict().
                "contacts": [contact.to_dict() for contact in self.contacts],
                # Enums serialize as their raw string values.
                "customerType": self.customer_type.value,
                "billingMethod": self.billing_method.value,
                "primaryBudgetNumber": self.primary_budget_number,
                "owner": self.owner,
                "sharedWith": self.shared_with,
                "isArchived": self.is_archived,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from the camelCase wire format; unrecognized keys become additional properties."""
        from ..models.contact import Contact

        payload = src_dict.copy()
        billing_account = cls(
            id=payload.pop("id"),
            name=payload.pop("name"),
            organization=payload.pop("organization"),
            contacts=[Contact.from_dict(item) for item in payload.pop("contacts")],
            customer_type=CustomerType(payload.pop("customerType")),
            billing_method=BillingMethod(payload.pop("billingMethod")),
            primary_budget_number=payload.pop("primaryBudgetNumber"),
            owner=payload.pop("owner"),
            shared_with=cast(List[str], payload.pop("sharedWith")),
            is_archived=payload.pop("isArchived"),
        )
        billing_account.additional_properties = payload
        return billing_account

    @property
    def additional_keys(self) -> List[str]:
        """Names of all extra (non-modelled) properties."""
        return list(self.additional_properties.keys())
Attributes:
- id (str):
- name (str):
- organization (str):
- contacts (List['Contact']):
- customer_type (CustomerType):
- billing_method (BillingMethod):
- primary_budget_number (str):
- owner (str):
- shared_with (List[str]):
- is_archived (bool):
33def __init__(self, id, name, organization, contacts, customer_type, billing_method, primary_budget_number, owner, shared_with, is_archived): 34 self.id = id 35 self.name = name 36 self.organization = organization 37 self.contacts = contacts 38 self.customer_type = customer_type 39 self.billing_method = billing_method 40 self.primary_budget_number = primary_budget_number 41 self.owner = owner 42 self.shared_with = shared_with 43 self.is_archived = is_archived 44 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class BillingAccount.
45 def to_dict(self) -> Dict[str, Any]: 46 id = self.id 47 48 name = self.name 49 50 organization = self.organization 51 52 contacts = [] 53 for contacts_item_data in self.contacts: 54 contacts_item = contacts_item_data.to_dict() 55 contacts.append(contacts_item) 56 57 customer_type = self.customer_type.value 58 59 billing_method = self.billing_method.value 60 61 primary_budget_number = self.primary_budget_number 62 63 owner = self.owner 64 65 shared_with = self.shared_with 66 67 is_archived = self.is_archived 68 69 field_dict: Dict[str, Any] = {} 70 field_dict.update(self.additional_properties) 71 field_dict.update( 72 { 73 "id": id, 74 "name": name, 75 "organization": organization, 76 "contacts": contacts, 77 "customerType": customer_type, 78 "billingMethod": billing_method, 79 "primaryBudgetNumber": primary_budget_number, 80 "owner": owner, 81 "sharedWith": shared_with, 82 "isArchived": is_archived, 83 } 84 ) 85 86 return field_dict
88 @classmethod 89 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 90 from ..models.contact import Contact 91 92 d = src_dict.copy() 93 id = d.pop("id") 94 95 name = d.pop("name") 96 97 organization = d.pop("organization") 98 99 contacts = [] 100 _contacts = d.pop("contacts") 101 for contacts_item_data in _contacts: 102 contacts_item = Contact.from_dict(contacts_item_data) 103 104 contacts.append(contacts_item) 105 106 customer_type = CustomerType(d.pop("customerType")) 107 108 billing_method = BillingMethod(d.pop("billingMethod")) 109 110 primary_budget_number = d.pop("primaryBudgetNumber") 111 112 owner = d.pop("owner") 113 114 shared_with = cast(List[str], d.pop("sharedWith")) 115 116 is_archived = d.pop("isArchived") 117 118 billing_account = cls( 119 id=id, 120 name=name, 121 organization=organization, 122 contacts=contacts, 123 customer_type=customer_type, 124 billing_method=billing_method, 125 primary_budget_number=primary_budget_number, 126 owner=owner, 127 shared_with=shared_with, 128 is_archived=is_archived, 129 ) 130 131 billing_account.additional_properties = d 132 return billing_account
@_attrs_define
class BillingAccountRequest:
    """
    Attributes:
        name (str):
        contacts (List['Contact']):
        customer_type (CustomerType):
        billing_method (BillingMethod):
        primary_budget_number (str):
        owner (str):
        shared_with (List[str]):
    """

    name: str
    contacts: List["Contact"]
    customer_type: CustomerType
    billing_method: BillingMethod
    primary_budget_number: str
    owner: str
    shared_with: List[str]
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to the camelCase wire format, extra properties included."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "name": self.name,
                # Nested models serialize through their own to_dict().
                "contacts": [contact.to_dict() for contact in self.contacts],
                # Enums serialize as their raw string values.
                "customerType": self.customer_type.value,
                "billingMethod": self.billing_method.value,
                "primaryBudgetNumber": self.primary_budget_number,
                "owner": self.owner,
                "sharedWith": self.shared_with,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from the camelCase wire format; unrecognized keys become additional properties."""
        from ..models.contact import Contact

        payload = src_dict.copy()
        billing_account_request = cls(
            name=payload.pop("name"),
            contacts=[Contact.from_dict(item) for item in payload.pop("contacts")],
            customer_type=CustomerType(payload.pop("customerType")),
            billing_method=BillingMethod(payload.pop("billingMethod")),
            primary_budget_number=payload.pop("primaryBudgetNumber"),
            owner=payload.pop("owner"),
            shared_with=cast(List[str], payload.pop("sharedWith")),
        )
        billing_account_request.additional_properties = payload
        return billing_account_request

    @property
    def additional_keys(self) -> List[str]:
        """Names of all extra (non-modelled) properties."""
        return list(self.additional_properties.keys())
Attributes:
- name (str):
- contacts (List['Contact']):
- customer_type (CustomerType):
- billing_method (BillingMethod):
- primary_budget_number (str):
- owner (str):
- shared_with (List[str]):
30def __init__(self, name, contacts, customer_type, billing_method, primary_budget_number, owner, shared_with): 31 self.name = name 32 self.contacts = contacts 33 self.customer_type = customer_type 34 self.billing_method = billing_method 35 self.primary_budget_number = primary_budget_number 36 self.owner = owner 37 self.shared_with = shared_with 38 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class BillingAccountRequest.
39 def to_dict(self) -> Dict[str, Any]: 40 name = self.name 41 42 contacts = [] 43 for contacts_item_data in self.contacts: 44 contacts_item = contacts_item_data.to_dict() 45 contacts.append(contacts_item) 46 47 customer_type = self.customer_type.value 48 49 billing_method = self.billing_method.value 50 51 primary_budget_number = self.primary_budget_number 52 53 owner = self.owner 54 55 shared_with = self.shared_with 56 57 field_dict: Dict[str, Any] = {} 58 field_dict.update(self.additional_properties) 59 field_dict.update( 60 { 61 "name": name, 62 "contacts": contacts, 63 "customerType": customer_type, 64 "billingMethod": billing_method, 65 "primaryBudgetNumber": primary_budget_number, 66 "owner": owner, 67 "sharedWith": shared_with, 68 } 69 ) 70 71 return field_dict
73 @classmethod 74 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 75 from ..models.contact import Contact 76 77 d = src_dict.copy() 78 name = d.pop("name") 79 80 contacts = [] 81 _contacts = d.pop("contacts") 82 for contacts_item_data in _contacts: 83 contacts_item = Contact.from_dict(contacts_item_data) 84 85 contacts.append(contacts_item) 86 87 customer_type = CustomerType(d.pop("customerType")) 88 89 billing_method = BillingMethod(d.pop("billingMethod")) 90 91 primary_budget_number = d.pop("primaryBudgetNumber") 92 93 owner = d.pop("owner") 94 95 shared_with = cast(List[str], d.pop("sharedWith")) 96 97 billing_account_request = cls( 98 name=name, 99 contacts=contacts, 100 customer_type=customer_type, 101 billing_method=billing_method, 102 primary_budget_number=primary_budget_number, 103 owner=owner, 104 shared_with=shared_with, 105 ) 106 107 billing_account_request.additional_properties = d 108 return billing_account_request
5class BillingMethod(str, Enum): 6 BUDGET_NUMBER = "BUDGET_NUMBER" 7 CREDIT = "CREDIT" 8 PURCHASE_ORDER = "PURCHASE_ORDER" 9 UNKNOWN = "UNKNOWN" 10 """ This is a fallback value for when the value is not known, do not use this value when making requests """ 11 12 def __str__(self) -> str: 13 return str(self.value) 14 15 @classmethod 16 def _missing_(cls, number): 17 return cls(cls.UNKNOWN)
Return the enum member's underlying string value (e.g. str(BillingMethod.CREDIT) == "CREDIT").
This is a fallback value for when the value is not known, do not use this value when making requests
5class BudgetPeriod(str, Enum): 6 ANNUALLY = "ANNUALLY" 7 MONTHLY = "MONTHLY" 8 QUARTERLY = "QUARTERLY" 9 UNKNOWN = "UNKNOWN" 10 """ This is a fallback value for when the value is not known, do not use this value when making requests """ 11 12 def __str__(self) -> str: 13 return str(self.value) 14 15 @classmethod 16 def _missing_(cls, number): 17 return cls(cls.UNKNOWN)
Return the enum member's underlying string value (e.g. str(BudgetPeriod.MONTHLY) == "MONTHLY").
This is a fallback value for when the value is not known, do not use this value when making requests
10@_attrs_define 11class CalculatePipelineCostRequest: 12 """ """ 13 14 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 15 16 def to_dict(self) -> Dict[str, Any]: 17 field_dict: Dict[str, Any] = {} 18 field_dict.update(self.additional_properties) 19 field_dict.update({}) 20 21 return field_dict 22 23 @classmethod 24 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 25 d = src_dict.copy() 26 calculate_pipeline_cost_request = cls() 27 28 calculate_pipeline_cost_request.additional_properties = d 29 return calculate_pipeline_cost_request 30 31 @property 32 def additional_keys(self) -> List[str]: 33 return list(self.additional_properties.keys())
10@_attrs_define 11class ClassificationInput: 12 """ 13 Attributes: 14 name (str): 15 description (str): 16 requirement_ids (List[str]): 17 """ 18 19 name: str 20 description: str 21 requirement_ids: List[str] 22 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 23 24 def to_dict(self) -> Dict[str, Any]: 25 name = self.name 26 27 description = self.description 28 29 requirement_ids = self.requirement_ids 30 31 field_dict: Dict[str, Any] = {} 32 field_dict.update(self.additional_properties) 33 field_dict.update( 34 { 35 "name": name, 36 "description": description, 37 "requirementIds": requirement_ids, 38 } 39 ) 40 41 return field_dict 42 43 @classmethod 44 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 45 d = src_dict.copy() 46 name = d.pop("name") 47 48 description = d.pop("description") 49 50 requirement_ids = cast(List[str], d.pop("requirementIds")) 51 52 classification_input = cls( 53 name=name, 54 description=description, 55 requirement_ids=requirement_ids, 56 ) 57 58 classification_input.additional_properties = d 59 return classification_input 60 61 @property 62 def additional_keys(self) -> List[str]: 63 return list(self.additional_properties.keys())
Attributes:
- name (str):
- description (str):
- requirement_ids (List[str]):
26def __init__(self, name, description, requirement_ids): 27 self.name = name 28 self.description = description 29 self.requirement_ids = requirement_ids 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ClassificationInput.
24 def to_dict(self) -> Dict[str, Any]: 25 name = self.name 26 27 description = self.description 28 29 requirement_ids = self.requirement_ids 30 31 field_dict: Dict[str, Any] = {} 32 field_dict.update(self.additional_properties) 33 field_dict.update( 34 { 35 "name": name, 36 "description": description, 37 "requirementIds": requirement_ids, 38 } 39 ) 40 41 return field_dict
43 @classmethod 44 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 45 d = src_dict.copy() 46 name = d.pop("name") 47 48 description = d.pop("description") 49 50 requirement_ids = cast(List[str], d.pop("requirementIds")) 51 52 classification_input = cls( 53 name=name, 54 description=description, 55 requirement_ids=requirement_ids, 56 ) 57 58 classification_input.additional_properties = d 59 return classification_input
13@_attrs_define 14class CloudAccount: 15 """ 16 Attributes: 17 account_type (CloudAccountType): Type of cloud account (Hosted by Cirro, or Bring your own account) 18 account_id (Union[Unset, str]): AWS Account ID 19 account_name (Union[Unset, str]): Name used to describe the account, useful when the account hosts multiple 20 projects 21 region_name (Union[Unset, str]): AWS Region Code (defaults to region of Cirro app) Example: us-west-2. 22 """ 23 24 account_type: CloudAccountType 25 account_id: Union[Unset, str] = UNSET 26 account_name: Union[Unset, str] = UNSET 27 region_name: Union[Unset, str] = UNSET 28 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 29 30 def to_dict(self) -> Dict[str, Any]: 31 account_type = self.account_type.value 32 33 account_id = self.account_id 34 35 account_name = self.account_name 36 37 region_name = self.region_name 38 39 field_dict: Dict[str, Any] = {} 40 field_dict.update(self.additional_properties) 41 field_dict.update( 42 { 43 "accountType": account_type, 44 } 45 ) 46 if account_id is not UNSET: 47 field_dict["accountId"] = account_id 48 if account_name is not UNSET: 49 field_dict["accountName"] = account_name 50 if region_name is not UNSET: 51 field_dict["regionName"] = region_name 52 53 return field_dict 54 55 @classmethod 56 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 57 d = src_dict.copy() 58 account_type = CloudAccountType(d.pop("accountType")) 59 60 account_id = d.pop("accountId", UNSET) 61 62 account_name = d.pop("accountName", UNSET) 63 64 region_name = d.pop("regionName", UNSET) 65 66 cloud_account = cls( 67 account_type=account_type, 68 account_id=account_id, 69 account_name=account_name, 70 region_name=region_name, 71 ) 72 73 cloud_account.additional_properties = d 74 return cloud_account 75 76 @property 77 def additional_keys(self) -> List[str]: 78 return list(self.additional_properties.keys())
Attributes:
- account_type (CloudAccountType): Type of cloud account (Hosted by Cirro, or Bring your own account)
- account_id (Union[Unset, str]): AWS Account ID
- account_name (Union[Unset, str]): Name used to describe the account, useful when the account hosts multiple projects
- region_name (Union[Unset, str]): AWS Region Code (defaults to region of Cirro app) Example: us-west-2.
27def __init__(self, account_type, account_id=attr_dict['account_id'].default, account_name=attr_dict['account_name'].default, region_name=attr_dict['region_name'].default): 28 self.account_type = account_type 29 self.account_id = account_id 30 self.account_name = account_name 31 self.region_name = region_name 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class CloudAccount.
30 def to_dict(self) -> Dict[str, Any]: 31 account_type = self.account_type.value 32 33 account_id = self.account_id 34 35 account_name = self.account_name 36 37 region_name = self.region_name 38 39 field_dict: Dict[str, Any] = {} 40 field_dict.update(self.additional_properties) 41 field_dict.update( 42 { 43 "accountType": account_type, 44 } 45 ) 46 if account_id is not UNSET: 47 field_dict["accountId"] = account_id 48 if account_name is not UNSET: 49 field_dict["accountName"] = account_name 50 if region_name is not UNSET: 51 field_dict["regionName"] = region_name 52 53 return field_dict
55 @classmethod 56 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 57 d = src_dict.copy() 58 account_type = CloudAccountType(d.pop("accountType")) 59 60 account_id = d.pop("accountId", UNSET) 61 62 account_name = d.pop("accountName", UNSET) 63 64 region_name = d.pop("regionName", UNSET) 65 66 cloud_account = cls( 67 account_type=account_type, 68 account_id=account_id, 69 account_name=account_name, 70 region_name=region_name, 71 ) 72 73 cloud_account.additional_properties = d 74 return cloud_account
5class CloudAccountType(str, Enum): 6 BYOA = "BYOA" 7 HOSTED = "HOSTED" 8 UNKNOWN = "UNKNOWN" 9 """ This is a fallback value for when the value is not known, do not use this value when making requests """ 10 11 def __str__(self) -> str: 12 return str(self.value) 13 14 @classmethod 15 def _missing_(cls, number): 16 return cls(cls.UNKNOWN)
Return the enum member's underlying string value (e.g. str(CloudAccountType.HOSTED) == "HOSTED").
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class ColumnDefinition:
    """
    Attributes:
        col (Union[Unset, str]): Column name in asset file
        name (Union[Unset, str]): User-friendly column name
        desc (Union[Unset, str]): Description of the column
    """

    col: Union[Unset, str] = UNSET
    name: Union[Unset, str] = UNSET
    desc: Union[Unset, str] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a dict; fields left UNSET are omitted."""
        payload: Dict[str, Any] = dict(self.additional_properties)
        for key, value in (("col", self.col), ("name", self.name), ("desc", self.desc)):
            if value is not UNSET:
                payload[key] = value
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a dict; unmodeled keys go to additional_properties."""
        data = src_dict.copy()
        column_definition = cls(
            col=data.pop("col", UNSET),
            name=data.pop("name", UNSET),
            desc=data.pop("desc", UNSET),
        )
        column_definition.additional_properties = data
        return column_definition

    @property
    def additional_keys(self) -> List[str]:
        """Names of the unmodeled keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- col (Union[Unset, str]): Column name in asset file
- name (Union[Unset, str]): User-friendly column name
- desc (Union[Unset, str]): Description of the column
def __init__(self, col=attr_dict['col'].default, name=attr_dict['name'].default, desc=attr_dict['desc'].default):
    """Method generated by attrs for class ColumnDefinition."""
    self.col, self.name, self.desc = col, name, desc
    # additional_properties always starts from the attrs-generated factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ColumnDefinition.
def to_dict(self) -> Dict[str, Any]:
    """Serialize to a dict; fields left UNSET are omitted."""
    payload: Dict[str, Any] = dict(self.additional_properties)
    for key, value in (("col", self.col), ("name", self.name), ("desc", self.desc)):
        if value is not UNSET:
            payload[key] = value
    return payload
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize from a dict; unmodeled keys go to additional_properties."""
    data = src_dict.copy()
    column_definition = cls(
        col=data.pop("col", UNSET),
        name=data.pop("name", UNSET),
        desc=data.pop("desc", UNSET),
    )
    column_definition.additional_properties = data
    return column_definition
@_attrs_define
class ComputeEnvironmentConfiguration:
    """
    Attributes:
        environment_type (EnvironmentType): The type of compute environment
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
        id (Union[Unset, str]): The unique ID of the environment
        name (Union[Unset, str]): The display name of the environment
        properties (Union[Unset, ComputeEnvironmentConfigurationProperties]): Configuration properties passed to the
            environment
        agent (Union['Agent', None, Unset]):
        created_by (Union[Unset, str]): The user who created the environment
    """

    environment_type: EnvironmentType
    created_at: datetime.datetime
    updated_at: datetime.datetime
    id: Union[Unset, str] = UNSET
    name: Union[Unset, str] = UNSET
    properties: Union[Unset, "ComputeEnvironmentConfigurationProperties"] = UNSET
    agent: Union["Agent", None, Unset] = UNSET
    created_by: Union[Unset, str] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict; optional fields left UNSET are omitted."""
        from ..models.agent import Agent

        payload: Dict[str, Any] = dict(self.additional_properties)
        payload["environmentType"] = self.environment_type.value
        payload["createdAt"] = self.created_at.isoformat()
        payload["updatedAt"] = self.updated_at.isoformat()
        if self.id is not UNSET:
            payload["id"] = self.id
        if self.name is not UNSET:
            payload["name"] = self.name
        if not isinstance(self.properties, Unset):
            payload["properties"] = self.properties.to_dict()
        if not isinstance(self.agent, Unset):
            # An Agent model serializes recursively; an explicit None passes through.
            payload["agent"] = self.agent.to_dict() if isinstance(self.agent, Agent) else self.agent
        if self.created_by is not UNSET:
            payload["createdBy"] = self.created_by
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a dict; unmodeled keys end up in additional_properties."""
        from ..models.agent import Agent
        from ..models.compute_environment_configuration_properties import ComputeEnvironmentConfigurationProperties

        data = src_dict.copy()
        environment_type = EnvironmentType(data.pop("environmentType"))
        created_at = isoparse(data.pop("createdAt"))
        updated_at = isoparse(data.pop("updatedAt"))

        raw_properties = data.pop("properties", UNSET)
        properties: Union[Unset, ComputeEnvironmentConfigurationProperties]
        if isinstance(raw_properties, Unset):
            properties = UNSET
        else:
            properties = ComputeEnvironmentConfigurationProperties.from_dict(raw_properties)

        raw_agent = data.pop("agent", UNSET)
        agent: Union["Agent", None, Unset]
        if raw_agent is None or isinstance(raw_agent, Unset):
            agent = raw_agent
        else:
            try:
                if not isinstance(raw_agent, dict):
                    raise TypeError()
                agent = Agent.from_dict(raw_agent)
            except:  # noqa: E722
                # Keep the raw value when it does not parse as an Agent.
                agent = cast(Union["Agent", None, Unset], raw_agent)

        compute_environment_configuration = cls(
            environment_type=environment_type,
            created_at=created_at,
            updated_at=updated_at,
            id=data.pop("id", UNSET),
            name=data.pop("name", UNSET),
            properties=properties,
            agent=agent,
            created_by=data.pop("createdBy", UNSET),
        )
        compute_environment_configuration.additional_properties = data
        return compute_environment_configuration

    @property
    def additional_keys(self) -> List[str]:
        """Names of the unmodeled keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- environment_type (EnvironmentType): The type of compute environment
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
- id (Union[Unset, str]): The unique ID of the environment
- name (Union[Unset, str]): The display name of the environment
- properties (Union[Unset, ComputeEnvironmentConfigurationProperties]): Configuration properties passed to the environment
- agent (Union['Agent', None, Unset]):
- created_by (Union[Unset, str]): The user who created the environment
def __init__(self, environment_type, created_at, updated_at, id=attr_dict['id'].default, name=attr_dict['name'].default, properties=attr_dict['properties'].default, agent=attr_dict['agent'].default, created_by=attr_dict['created_by'].default):
    """Method generated by attrs for class ComputeEnvironmentConfiguration."""
    self.environment_type, self.created_at, self.updated_at = environment_type, created_at, updated_at
    self.id, self.name, self.properties = id, name, properties
    self.agent, self.created_by = agent, created_by
    # additional_properties always starts from the attrs-generated factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ComputeEnvironmentConfiguration.
def to_dict(self) -> Dict[str, Any]:
    """Serialize to a JSON-ready dict; optional fields left UNSET are omitted."""
    from ..models.agent import Agent

    payload: Dict[str, Any] = dict(self.additional_properties)
    payload["environmentType"] = self.environment_type.value
    payload["createdAt"] = self.created_at.isoformat()
    payload["updatedAt"] = self.updated_at.isoformat()
    if self.id is not UNSET:
        payload["id"] = self.id
    if self.name is not UNSET:
        payload["name"] = self.name
    if not isinstance(self.properties, Unset):
        payload["properties"] = self.properties.to_dict()
    if not isinstance(self.agent, Unset):
        # An Agent model serializes recursively; an explicit None passes through.
        payload["agent"] = self.agent.to_dict() if isinstance(self.agent, Agent) else self.agent
    if self.created_by is not UNSET:
        payload["createdBy"] = self.created_by
    return payload
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize from a dict; unmodeled keys end up in additional_properties."""
    from ..models.agent import Agent
    from ..models.compute_environment_configuration_properties import ComputeEnvironmentConfigurationProperties

    data = src_dict.copy()
    environment_type = EnvironmentType(data.pop("environmentType"))
    created_at = isoparse(data.pop("createdAt"))
    updated_at = isoparse(data.pop("updatedAt"))

    raw_properties = data.pop("properties", UNSET)
    properties: Union[Unset, ComputeEnvironmentConfigurationProperties]
    if isinstance(raw_properties, Unset):
        properties = UNSET
    else:
        properties = ComputeEnvironmentConfigurationProperties.from_dict(raw_properties)

    raw_agent = data.pop("agent", UNSET)
    agent: Union["Agent", None, Unset]
    if raw_agent is None or isinstance(raw_agent, Unset):
        agent = raw_agent
    else:
        try:
            if not isinstance(raw_agent, dict):
                raise TypeError()
            agent = Agent.from_dict(raw_agent)
        except:  # noqa: E722
            # Keep the raw value when it does not parse as an Agent.
            agent = cast(Union["Agent", None, Unset], raw_agent)

    compute_environment_configuration = cls(
        environment_type=environment_type,
        created_at=created_at,
        updated_at=updated_at,
        id=data.pop("id", UNSET),
        name=data.pop("name", UNSET),
        properties=properties,
        agent=agent,
        created_by=data.pop("createdBy", UNSET),
    )
    compute_environment_configuration.additional_properties = data
    return compute_environment_configuration
@_attrs_define
class ComputeEnvironmentConfigurationInput:
    """
    Attributes:
        name (str):
        agent_id (Union[None, Unset, str]):
        properties (Union['ComputeEnvironmentConfigurationInputProperties', None, Unset]):
    """

    name: str
    agent_id: Union[None, Unset, str] = UNSET
    properties: Union["ComputeEnvironmentConfigurationInputProperties", None, Unset] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize; agentId/properties are emitted only when set (an explicit None is kept)."""
        from ..models.compute_environment_configuration_input_properties import (
            ComputeEnvironmentConfigurationInputProperties,
        )

        payload: Dict[str, Any] = dict(self.additional_properties)
        payload["name"] = self.name
        if not isinstance(self.agent_id, Unset):
            payload["agentId"] = self.agent_id
        if not isinstance(self.properties, Unset):
            payload["properties"] = (
                self.properties.to_dict()
                if isinstance(self.properties, ComputeEnvironmentConfigurationInputProperties)
                else self.properties
            )
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize; unmodeled keys are preserved in additional_properties."""
        from ..models.compute_environment_configuration_input_properties import (
            ComputeEnvironmentConfigurationInputProperties,
        )

        data = src_dict.copy()
        name = data.pop("name")
        agent_id = cast(Union[None, Unset, str], data.pop("agentId", UNSET))

        raw_properties = data.pop("properties", UNSET)
        properties: Union["ComputeEnvironmentConfigurationInputProperties", None, Unset]
        if raw_properties is None or isinstance(raw_properties, Unset):
            properties = raw_properties
        else:
            try:
                if not isinstance(raw_properties, dict):
                    raise TypeError()
                properties = ComputeEnvironmentConfigurationInputProperties.from_dict(raw_properties)
            except:  # noqa: E722
                # Keep the raw value when it does not parse as a properties model.
                properties = cast(
                    Union["ComputeEnvironmentConfigurationInputProperties", None, Unset], raw_properties
                )

        compute_environment_configuration_input = cls(name=name, agent_id=agent_id, properties=properties)
        compute_environment_configuration_input.additional_properties = data
        return compute_environment_configuration_input

    @property
    def additional_keys(self) -> List[str]:
        """Names of the unmodeled keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- name (str):
- agent_id (Union[None, Unset, str]):
- properties (Union['ComputeEnvironmentConfigurationInputProperties', None, Unset]):
def __init__(self, name, agent_id=attr_dict['agent_id'].default, properties=attr_dict['properties'].default):
    """Method generated by attrs for class ComputeEnvironmentConfigurationInput."""
    self.name, self.agent_id, self.properties = name, agent_id, properties
    # additional_properties always starts from the attrs-generated factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ComputeEnvironmentConfigurationInput.
def to_dict(self) -> Dict[str, Any]:
    """Serialize; agentId/properties are emitted only when set (an explicit None is kept)."""
    from ..models.compute_environment_configuration_input_properties import (
        ComputeEnvironmentConfigurationInputProperties,
    )

    payload: Dict[str, Any] = dict(self.additional_properties)
    payload["name"] = self.name
    if not isinstance(self.agent_id, Unset):
        payload["agentId"] = self.agent_id
    if not isinstance(self.properties, Unset):
        payload["properties"] = (
            self.properties.to_dict()
            if isinstance(self.properties, ComputeEnvironmentConfigurationInputProperties)
            else self.properties
        )
    return payload
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize; unmodeled keys are preserved in additional_properties."""
    from ..models.compute_environment_configuration_input_properties import (
        ComputeEnvironmentConfigurationInputProperties,
    )

    data = src_dict.copy()
    name = data.pop("name")
    agent_id = cast(Union[None, Unset, str], data.pop("agentId", UNSET))

    raw_properties = data.pop("properties", UNSET)
    properties: Union["ComputeEnvironmentConfigurationInputProperties", None, Unset]
    if raw_properties is None or isinstance(raw_properties, Unset):
        properties = raw_properties
    else:
        try:
            if not isinstance(raw_properties, dict):
                raise TypeError()
            properties = ComputeEnvironmentConfigurationInputProperties.from_dict(raw_properties)
        except:  # noqa: E722
            # Keep the raw value when it does not parse as a properties model.
            properties = cast(
                Union["ComputeEnvironmentConfigurationInputProperties", None, Unset], raw_properties
            )

    compute_environment_configuration_input = cls(name=name, agent_id=agent_id, properties=properties)
    compute_environment_configuration_input.additional_properties = data
    return compute_environment_configuration_input
@_attrs_define
class ComputeEnvironmentConfigurationInputProperties:
    """Free-form string map of configuration properties."""

    additional_properties: Dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize as a shallow copy of the underlying map."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Every key of the source dict becomes an additional property."""
        compute_environment_configuration_input_properties = cls()
        compute_environment_configuration_input_properties.additional_properties = src_dict.copy()
        return compute_environment_configuration_input_properties

    @property
    def additional_keys(self) -> List[str]:
        """Names of the keys carried by this instance."""
        return list(self.additional_properties.keys())
Method generated by attrs for class ComputeEnvironmentConfigurationInputProperties.
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Every key of the source dict becomes an additional property."""
    compute_environment_configuration_input_properties = cls()
    compute_environment_configuration_input_properties.additional_properties = src_dict.copy()
    return compute_environment_configuration_input_properties
@_attrs_define
class ComputeEnvironmentConfigurationProperties:
    """Configuration properties passed to the environment"""

    additional_properties: Dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize as a shallow copy of the underlying map."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Every key of the source dict becomes an additional property."""
        compute_environment_configuration_properties = cls()
        compute_environment_configuration_properties.additional_properties = src_dict.copy()
        return compute_environment_configuration_properties

    @property
    def additional_keys(self) -> List[str]:
        """Names of the keys carried by this instance."""
        return list(self.additional_properties.keys())
Configuration properties passed to the environment
Method generated by attrs for class ComputeEnvironmentConfigurationProperties.
@_attrs_define
class Contact:
    """
    Attributes:
        name (str):
        organization (str):
        email (str):
        phone (str):
    """

    name: str
    organization: str
    email: str
    phone: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize all required fields plus any additional properties."""
        payload: Dict[str, Any] = dict(self.additional_properties)
        payload.update(
            {
                "name": self.name,
                "organization": self.organization,
                "email": self.email,
                "phone": self.phone,
            }
        )
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize; leftover keys become additional_properties."""
        data = src_dict.copy()
        contact = cls(
            name=data.pop("name"),
            organization=data.pop("organization"),
            email=data.pop("email"),
            phone=data.pop("phone"),
        )
        contact.additional_properties = data
        return contact

    @property
    def additional_keys(self) -> List[str]:
        """Names of the unmodeled keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- name (str):
- organization (str):
- email (str):
- phone (str):
def __init__(self, name, organization, email, phone):
    """Method generated by attrs for class Contact."""
    self.name, self.organization = name, organization
    self.email, self.phone = email, phone
    # additional_properties always starts from the attrs-generated factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Contact.
def to_dict(self) -> Dict[str, Any]:
    """Serialize all required fields plus any additional properties."""
    payload: Dict[str, Any] = dict(self.additional_properties)
    payload.update(
        {
            "name": self.name,
            "organization": self.organization,
            "email": self.email,
            "phone": self.phone,
        }
    )
    return payload
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize; leftover keys become additional_properties."""
    data = src_dict.copy()
    contact = cls(
        name=data.pop("name"),
        organization=data.pop("organization"),
        email=data.pop("email"),
        phone=data.pop("phone"),
    )
    contact.additional_properties = data
    return contact
@_attrs_define
class ContactInput:
    """
    Attributes:
        title (str):
        description (str):
        name (str):
        phone (str):
        email (str):
    """

    title: str
    description: str
    name: str
    phone: str
    email: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize all required fields plus any additional properties."""
        payload: Dict[str, Any] = dict(self.additional_properties)
        payload.update(
            {
                "title": self.title,
                "description": self.description,
                "name": self.name,
                "phone": self.phone,
                "email": self.email,
            }
        )
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize; leftover keys become additional_properties."""
        data = src_dict.copy()
        contact_input = cls(
            title=data.pop("title"),
            description=data.pop("description"),
            name=data.pop("name"),
            phone=data.pop("phone"),
            email=data.pop("email"),
        )
        contact_input.additional_properties = data
        return contact_input

    @property
    def additional_keys(self) -> List[str]:
        """Names of the unmodeled keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- title (str):
- description (str):
- name (str):
- phone (str):
- email (str):
def __init__(self, title, description, name, phone, email):
    """Method generated by attrs for class ContactInput."""
    self.title, self.description, self.name = title, description, name
    self.phone, self.email = phone, email
    # additional_properties always starts from the attrs-generated factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ContactInput.
def to_dict(self) -> Dict[str, Any]:
    """Serialize all required fields plus any additional properties."""
    payload: Dict[str, Any] = dict(self.additional_properties)
    payload.update(
        {
            "title": self.title,
            "description": self.description,
            "name": self.name,
            "phone": self.phone,
            "email": self.email,
        }
    )
    return payload
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize; leftover keys become additional_properties."""
    data = src_dict.copy()
    contact_input = cls(
        title=data.pop("title"),
        description=data.pop("description"),
        name=data.pop("name"),
        phone=data.pop("phone"),
        email=data.pop("email"),
    )
    contact_input.additional_properties = data
    return contact_input
@_attrs_define
class CreateNotebookInstanceRequest:
    """
    Attributes:
        name (str):
        instance_type (str): AWS EC2 Instance Type (see list of available options) Example: ml.t3.medium.
        accelerator_types (List[str]):
        volume_size_gb (int):
        git_repositories (Union[List[str], None, Unset]): List of public git repositories to clone into the notebook
            instance.
        is_shared_with_project (Union[Unset, bool]): Whether the notebook is shared with the project Default: False.
    """

    name: str
    instance_type: str
    accelerator_types: List[str]
    volume_size_gb: int
    git_repositories: Union[List[str], None, Unset] = UNSET
    is_shared_with_project: Union[Unset, bool] = False
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize; gitRepositories/isSharedWithProject are omitted only when UNSET."""
        payload: Dict[str, Any] = dict(self.additional_properties)
        payload.update(
            {
                "name": self.name,
                "instanceType": self.instance_type,
                "acceleratorTypes": self.accelerator_types,
                "volumeSizeGB": self.volume_size_gb,
            }
        )
        if not isinstance(self.git_repositories, Unset):
            # Either a list of repo URLs or an explicit None, both pass through.
            payload["gitRepositories"] = self.git_repositories
        if self.is_shared_with_project is not UNSET:
            payload["isSharedWithProject"] = self.is_shared_with_project
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize; leftover keys become additional_properties."""
        data = src_dict.copy()
        create_notebook_instance_request = cls(
            name=data.pop("name"),
            instance_type=data.pop("instanceType"),
            accelerator_types=cast(List[str], data.pop("acceleratorTypes")),
            volume_size_gb=data.pop("volumeSizeGB"),
            git_repositories=cast(Union[List[str], None, Unset], data.pop("gitRepositories", UNSET)),
            is_shared_with_project=data.pop("isSharedWithProject", UNSET),
        )
        create_notebook_instance_request.additional_properties = data
        return create_notebook_instance_request

    @property
    def additional_keys(self) -> List[str]:
        """Names of the unmodeled keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- name (str):
- instance_type (str): AWS EC2 Instance Type (see list of available options). Example: ml.t3.medium.
- accelerator_types (List[str]):
- volume_size_gb (int):
- git_repositories (Union[List[str], None, Unset]): List of public git repositories to clone into the notebook instance.
- is_shared_with_project (Union[Unset, bool]): Whether the notebook is shared with the project. Default: False.
def __init__(self, name, instance_type, accelerator_types, volume_size_gb, git_repositories=attr_dict['git_repositories'].default, is_shared_with_project=attr_dict['is_shared_with_project'].default):
    """Method generated by attrs for class CreateNotebookInstanceRequest."""
    self.name, self.instance_type = name, instance_type
    self.accelerator_types, self.volume_size_gb = accelerator_types, volume_size_gb
    self.git_repositories = git_repositories
    self.is_shared_with_project = is_shared_with_project
    # additional_properties always starts from the attrs-generated factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class CreateNotebookInstanceRequest.
def to_dict(self) -> Dict[str, Any]:
    """Serialize; gitRepositories/isSharedWithProject are omitted only when UNSET."""
    payload: Dict[str, Any] = dict(self.additional_properties)
    payload.update(
        {
            "name": self.name,
            "instanceType": self.instance_type,
            "acceleratorTypes": self.accelerator_types,
            "volumeSizeGB": self.volume_size_gb,
        }
    )
    if not isinstance(self.git_repositories, Unset):
        # Either a list of repo URLs or an explicit None, both pass through.
        payload["gitRepositories"] = self.git_repositories
    if self.is_shared_with_project is not UNSET:
        payload["isSharedWithProject"] = self.is_shared_with_project
    return payload
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize; leftover keys become additional_properties."""
    data = src_dict.copy()
    create_notebook_instance_request = cls(
        name=data.pop("name"),
        instance_type=data.pop("instanceType"),
        accelerator_types=cast(List[str], data.pop("acceleratorTypes")),
        volume_size_gb=data.pop("volumeSizeGB"),
        git_repositories=cast(Union[List[str], None, Unset], data.pop("gitRepositories", UNSET)),
        is_shared_with_project=data.pop("isSharedWithProject", UNSET),
    )
    create_notebook_instance_request.additional_properties = data
    return create_notebook_instance_request
@_attrs_define
class CreateProjectAccessRequest:
    """
    Attributes:
        role (ProjectRole):
        message (str):
    """

    role: ProjectRole
    message: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize; the role enum is stored by its wire value."""
        payload: Dict[str, Any] = dict(self.additional_properties)
        payload.update({"role": self.role.value, "message": self.message})
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize; leftover keys become additional_properties."""
        data = src_dict.copy()
        create_project_access_request = cls(
            role=ProjectRole(data.pop("role")),
            message=data.pop("message"),
        )
        create_project_access_request.additional_properties = data
        return create_project_access_request

    @property
    def additional_keys(self) -> List[str]:
        """Names of the unmodeled keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- role (ProjectRole):
- message (str):
def __init__(self, role, message):
    """Method generated by attrs for class CreateProjectAccessRequest."""
    self.role, self.message = role, message
    # additional_properties always starts from the attrs-generated factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class CreateProjectAccessRequest.
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize; leftover keys become additional_properties."""
    data = src_dict.copy()
    create_project_access_request = cls(
        role=ProjectRole(data.pop("role")),
        message=data.pop("message"),
    )
    create_project_access_request.additional_properties = data
    return create_project_access_request
@_attrs_define
class CreateReferenceRequest:
    """
    Attributes:
        name (str):
        description (str):
        type (str):
        expected_files (List[str]):
    """

    name: str
    description: str
    type: str
    expected_files: List[str]
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize all required fields plus any additional properties."""
        payload: Dict[str, Any] = dict(self.additional_properties)
        payload.update(
            {
                "name": self.name,
                "description": self.description,
                "type": self.type,
                "expectedFiles": self.expected_files,
            }
        )
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize; leftover keys become additional_properties."""
        data = src_dict.copy()
        create_reference_request = cls(
            name=data.pop("name"),
            description=data.pop("description"),
            type=data.pop("type"),
            expected_files=cast(List[str], data.pop("expectedFiles")),
        )
        create_reference_request.additional_properties = data
        return create_reference_request

    @property
    def additional_keys(self) -> List[str]:
        """Names of the unmodeled keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- name (str):
- description (str):
- type (str):
- expected_files (List[str]):
27def __init__(self, name, description, type, expected_files): 28 self.name = name 29 self.description = description 30 self.type = type 31 self.expected_files = expected_files 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class CreateReferenceRequest.
26 def to_dict(self) -> Dict[str, Any]: 27 name = self.name 28 29 description = self.description 30 31 type = self.type 32 33 expected_files = self.expected_files 34 35 field_dict: Dict[str, Any] = {} 36 field_dict.update(self.additional_properties) 37 field_dict.update( 38 { 39 "name": name, 40 "description": description, 41 "type": type, 42 "expectedFiles": expected_files, 43 } 44 ) 45 46 return field_dict
48 @classmethod 49 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 50 d = src_dict.copy() 51 name = d.pop("name") 52 53 description = d.pop("description") 54 55 type = d.pop("type") 56 57 expected_files = cast(List[str], d.pop("expectedFiles")) 58 59 create_reference_request = cls( 60 name=name, 61 description=description, 62 type=type, 63 expected_files=expected_files, 64 ) 65 66 create_reference_request.additional_properties = d 67 return create_reference_request
@_attrs_define
class CreateResponse:
    """Generic creation response returned by the API.

    Attributes:
        id (str):
        message (str):
    """

    id: str
    message: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        # Declared fields overwrite any colliding pass-through keys.
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized["id"] = self.id
        serialized["message"] = self.message
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        data = src_dict.copy()
        instance = cls(
            id=data.pop("id"),
            message=data.pop("message"),
        )
        # Remaining keys are preserved as additional_properties.
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> List[str]:
        return [*self.additional_properties]
Attributes:
- id (str):
- message (str):
25def __init__(self, id, message): 26 self.id = id 27 self.message = message 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class CreateResponse.
class CustomerType(str, Enum):
    """Type of customer.

    ``UNKNOWN`` is a client-side fallback: any server value not listed here
    maps to it (see ``_missing_``) instead of raising ``ValueError``.
    """

    CONSORTIUM = "CONSORTIUM"
    EXTERNAL = "EXTERNAL"
    INTERNAL = "INTERNAL"
    # Fallback value for when the value is not known; do not use this value
    # when making requests.
    UNKNOWN = "UNKNOWN"

    def __str__(self) -> str:
        # Render as the raw API value rather than "CustomerType.X".
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Called by the Enum machinery for unrecognized values; return the
        # fallback member directly instead of re-invoking the constructor
        # (the original `cls(cls.UNKNOWN)` only worked by accident of
        # value lookup).
        return cls.UNKNOWN
Returns the string value of the enum member (e.g. `str(CustomerType.INTERNAL)` is `"INTERNAL"`), so the member renders as the raw API value rather than `CustomerType.INTERNAL`.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class CustomPipelineSettings:
    """Used to describe the location of the process definition dependencies

    Attributes:
        repository (str): GitHub repository that contains the process definition Example: CirroBio/my-pipeline.
        branch (Union[Unset, str]): Branch, tag, or commit hash of the repo that contains the process definition
            Default: 'main'.
        folder (Union[Unset, str]): Folder within the repo that contains the process definition Default: '.cirro'.
        repository_type (Union[None, RepositoryType, Unset]):
        last_sync (Union[None, Unset, datetime.datetime]): Time of last sync
        sync_status (Union[None, SyncStatus, Unset]):
        commit_hash (Union[None, Unset, str]): Commit hash of the last successful sync
        is_authorized (Union[Unset, bool]): Whether we are authorized to access the repository Default: False.
    """

    repository: str
    branch: Union[Unset, str] = "main"
    folder: Union[Unset, str] = ".cirro"
    repository_type: Union[None, RepositoryType, Unset] = UNSET
    last_sync: Union[None, Unset, datetime.datetime] = UNSET
    sync_status: Union[None, SyncStatus, Unset] = UNSET
    commit_hash: Union[None, Unset, str] = UNSET
    is_authorized: Union[Unset, bool] = False
    # Keys from the API payload that the model does not declare; round-tripped
    # through from_dict()/to_dict() untouched.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict; fields still UNSET are omitted."""
        repository = self.repository

        branch = self.branch

        folder = self.folder

        # Enum members are serialized by value; None and raw strings pass through.
        repository_type: Union[None, Unset, str]
        if isinstance(self.repository_type, Unset):
            repository_type = UNSET
        elif isinstance(self.repository_type, RepositoryType):
            repository_type = self.repository_type.value
        else:
            repository_type = self.repository_type

        # Datetimes are serialized as ISO-8601 strings.
        last_sync: Union[None, Unset, str]
        if isinstance(self.last_sync, Unset):
            last_sync = UNSET
        elif isinstance(self.last_sync, datetime.datetime):
            last_sync = self.last_sync.isoformat()
        else:
            last_sync = self.last_sync

        sync_status: Union[None, Unset, str]
        if isinstance(self.sync_status, Unset):
            sync_status = UNSET
        elif isinstance(self.sync_status, SyncStatus):
            sync_status = self.sync_status.value
        else:
            sync_status = self.sync_status

        commit_hash: Union[None, Unset, str]
        if isinstance(self.commit_hash, Unset):
            commit_hash = UNSET
        else:
            commit_hash = self.commit_hash

        is_authorized = self.is_authorized

        # Declared fields are written after additional_properties so they take
        # precedence over colliding extra keys; only "repository" is mandatory.
        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "repository": repository,
            }
        )
        if branch is not UNSET:
            field_dict["branch"] = branch
        if folder is not UNSET:
            field_dict["folder"] = folder
        if repository_type is not UNSET:
            field_dict["repositoryType"] = repository_type
        if last_sync is not UNSET:
            field_dict["lastSync"] = last_sync
        if sync_status is not UNSET:
            field_dict["syncStatus"] = sync_status
        if commit_hash is not UNSET:
            field_dict["commitHash"] = commit_hash
        if is_authorized is not UNSET:
            field_dict["isAuthorized"] = is_authorized

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from an API dict; unrecognized keys are kept in
        ``additional_properties``."""
        d = src_dict.copy()
        repository = d.pop("repository")

        branch = d.pop("branch", UNSET)

        folder = d.pop("folder", UNSET)

        def _parse_repository_type(data: object) -> Union[None, RepositoryType, Unset]:
            # None/UNSET pass through; strings are tried as enum values and
            # anything unparseable falls through as the raw value.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                repository_type_type_1 = RepositoryType(data)

                return repository_type_type_1
            except:  # noqa: E722
                pass
            return cast(Union[None, RepositoryType, Unset], data)

        repository_type = _parse_repository_type(d.pop("repositoryType", UNSET))

        def _parse_last_sync(data: object) -> Union[None, Unset, datetime.datetime]:
            # ISO-8601 strings are parsed to datetime; failures fall through raw.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                last_sync_type_0 = isoparse(data)

                return last_sync_type_0
            except:  # noqa: E722
                pass
            return cast(Union[None, Unset, datetime.datetime], data)

        last_sync = _parse_last_sync(d.pop("lastSync", UNSET))

        def _parse_sync_status(data: object) -> Union[None, SyncStatus, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                sync_status_type_1 = SyncStatus(data)

                return sync_status_type_1
            except:  # noqa: E722
                pass
            return cast(Union[None, SyncStatus, Unset], data)

        sync_status = _parse_sync_status(d.pop("syncStatus", UNSET))

        def _parse_commit_hash(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        commit_hash = _parse_commit_hash(d.pop("commitHash", UNSET))

        is_authorized = d.pop("isAuthorized", UNSET)

        custom_pipeline_settings = cls(
            repository=repository,
            branch=branch,
            folder=folder,
            repository_type=repository_type,
            last_sync=last_sync,
            sync_status=sync_status,
            commit_hash=commit_hash,
            is_authorized=is_authorized,
        )

        # Whatever keys remain after popping the declared ones are preserved.
        custom_pipeline_settings.additional_properties = d
        return custom_pipeline_settings

    @property
    def additional_keys(self) -> List[str]:
        # Names of the undeclared pass-through keys.
        return list(self.additional_properties.keys())
Used to describe the location of the process definition dependencies
Attributes:
- repository (str): GitHub repository that contains the process definition Example: CirroBio/my-pipeline.
- branch (Union[Unset, str]): Branch, tag, or commit hash of the repo that contains the process definition Default: 'main'.
- folder (Union[Unset, str]): Folder within the repo that contains the process definition Default: '.cirro'.
- repository_type (Union[None, RepositoryType, Unset]):
- last_sync (Union[None, Unset, datetime.datetime]): Time of last sync
- sync_status (Union[None, SyncStatus, Unset]):
- commit_hash (Union[None, Unset, str]): Commit hash of the last successful sync
- is_authorized (Union[Unset, bool]): Whether we are authorized to access the repository Default: False.
31def __init__(self, repository, branch=attr_dict['branch'].default, folder=attr_dict['folder'].default, repository_type=attr_dict['repository_type'].default, last_sync=attr_dict['last_sync'].default, sync_status=attr_dict['sync_status'].default, commit_hash=attr_dict['commit_hash'].default, is_authorized=attr_dict['is_authorized'].default): 32 self.repository = repository 33 self.branch = branch 34 self.folder = folder 35 self.repository_type = repository_type 36 self.last_sync = last_sync 37 self.sync_status = sync_status 38 self.commit_hash = commit_hash 39 self.is_authorized = is_authorized 40 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class CustomPipelineSettings.
42 def to_dict(self) -> Dict[str, Any]: 43 repository = self.repository 44 45 branch = self.branch 46 47 folder = self.folder 48 49 repository_type: Union[None, Unset, str] 50 if isinstance(self.repository_type, Unset): 51 repository_type = UNSET 52 elif isinstance(self.repository_type, RepositoryType): 53 repository_type = self.repository_type.value 54 else: 55 repository_type = self.repository_type 56 57 last_sync: Union[None, Unset, str] 58 if isinstance(self.last_sync, Unset): 59 last_sync = UNSET 60 elif isinstance(self.last_sync, datetime.datetime): 61 last_sync = self.last_sync.isoformat() 62 else: 63 last_sync = self.last_sync 64 65 sync_status: Union[None, Unset, str] 66 if isinstance(self.sync_status, Unset): 67 sync_status = UNSET 68 elif isinstance(self.sync_status, SyncStatus): 69 sync_status = self.sync_status.value 70 else: 71 sync_status = self.sync_status 72 73 commit_hash: Union[None, Unset, str] 74 if isinstance(self.commit_hash, Unset): 75 commit_hash = UNSET 76 else: 77 commit_hash = self.commit_hash 78 79 is_authorized = self.is_authorized 80 81 field_dict: Dict[str, Any] = {} 82 field_dict.update(self.additional_properties) 83 field_dict.update( 84 { 85 "repository": repository, 86 } 87 ) 88 if branch is not UNSET: 89 field_dict["branch"] = branch 90 if folder is not UNSET: 91 field_dict["folder"] = folder 92 if repository_type is not UNSET: 93 field_dict["repositoryType"] = repository_type 94 if last_sync is not UNSET: 95 field_dict["lastSync"] = last_sync 96 if sync_status is not UNSET: 97 field_dict["syncStatus"] = sync_status 98 if commit_hash is not UNSET: 99 field_dict["commitHash"] = commit_hash 100 if is_authorized is not UNSET: 101 field_dict["isAuthorized"] = is_authorized 102 103 return field_dict
105 @classmethod 106 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 107 d = src_dict.copy() 108 repository = d.pop("repository") 109 110 branch = d.pop("branch", UNSET) 111 112 folder = d.pop("folder", UNSET) 113 114 def _parse_repository_type(data: object) -> Union[None, RepositoryType, Unset]: 115 if data is None: 116 return data 117 if isinstance(data, Unset): 118 return data 119 try: 120 if not isinstance(data, str): 121 raise TypeError() 122 repository_type_type_1 = RepositoryType(data) 123 124 return repository_type_type_1 125 except: # noqa: E722 126 pass 127 return cast(Union[None, RepositoryType, Unset], data) 128 129 repository_type = _parse_repository_type(d.pop("repositoryType", UNSET)) 130 131 def _parse_last_sync(data: object) -> Union[None, Unset, datetime.datetime]: 132 if data is None: 133 return data 134 if isinstance(data, Unset): 135 return data 136 try: 137 if not isinstance(data, str): 138 raise TypeError() 139 last_sync_type_0 = isoparse(data) 140 141 return last_sync_type_0 142 except: # noqa: E722 143 pass 144 return cast(Union[None, Unset, datetime.datetime], data) 145 146 last_sync = _parse_last_sync(d.pop("lastSync", UNSET)) 147 148 def _parse_sync_status(data: object) -> Union[None, SyncStatus, Unset]: 149 if data is None: 150 return data 151 if isinstance(data, Unset): 152 return data 153 try: 154 if not isinstance(data, str): 155 raise TypeError() 156 sync_status_type_1 = SyncStatus(data) 157 158 return sync_status_type_1 159 except: # noqa: E722 160 pass 161 return cast(Union[None, SyncStatus, Unset], data) 162 163 sync_status = _parse_sync_status(d.pop("syncStatus", UNSET)) 164 165 def _parse_commit_hash(data: object) -> Union[None, Unset, str]: 166 if data is None: 167 return data 168 if isinstance(data, Unset): 169 return data 170 return cast(Union[None, Unset, str], data) 171 172 commit_hash = _parse_commit_hash(d.pop("commitHash", UNSET)) 173 174 is_authorized = d.pop("isAuthorized", UNSET) 175 176 
custom_pipeline_settings = cls( 177 repository=repository, 178 branch=branch, 179 folder=folder, 180 repository_type=repository_type, 181 last_sync=last_sync, 182 sync_status=sync_status, 183 commit_hash=commit_hash, 184 is_authorized=is_authorized, 185 ) 186 187 custom_pipeline_settings.additional_properties = d 188 return custom_pipeline_settings
@_attrs_define
class CustomProcessInput:
    """
    Attributes:
        id (str): Unique ID of the Process Example: process-hutch-magic_flute-1_0.
        name (str): Friendly name for the process Example: MAGeCK Flute.
        description (str): Description of the process Example: MAGeCK Flute enables accurate identification of essential
            genes with their related biological functions.
        executor (Executor): How the workflow is executed
        child_process_ids (List[str]): IDs of pipelines that can be run downstream
        parent_process_ids (List[str]): IDs of processes that can run this pipeline
        linked_project_ids (List[str]): Projects that can run this process
        data_type (Union[None, Unset, str]): Name of the data type this pipeline produces (if it is not defined, use the
            name)
        category (Union[Unset, str]): Category of the process Example: Microbial Analysis.
        documentation_url (Union[None, Unset, str]): Link to process documentation Example:
            https://docs.cirro.bio/pipelines/catalog_targeted_sequencing/#crispr-screen-analysis.
        file_requirements_message (Union[None, Unset, str]): Description of the files to be uploaded (optional)
        pipeline_code (Union['PipelineCode', None, Unset]):
        is_tenant_wide (Union[Unset, bool]): Whether the process is shared with the tenant
        allow_multiple_sources (Union[Unset, bool]): Whether the pipeline is allowed to have multiple dataset sources
        uses_sample_sheet (Union[Unset, bool]): Whether the pipeline uses the Cirro-provided sample sheet
        custom_settings (Union['CustomPipelineSettings', None, Unset]):
        file_mapping_rules (Union[List['FileMappingRule'], None, Unset]):
    """

    id: str
    name: str
    description: str
    executor: Executor
    child_process_ids: List[str]
    parent_process_ids: List[str]
    linked_project_ids: List[str]
    data_type: Union[None, Unset, str] = UNSET
    category: Union[Unset, str] = UNSET
    documentation_url: Union[None, Unset, str] = UNSET
    file_requirements_message: Union[None, Unset, str] = UNSET
    pipeline_code: Union["PipelineCode", None, Unset] = UNSET
    is_tenant_wide: Union[Unset, bool] = UNSET
    allow_multiple_sources: Union[Unset, bool] = UNSET
    uses_sample_sheet: Union[Unset, bool] = UNSET
    custom_settings: Union["CustomPipelineSettings", None, Unset] = UNSET
    file_mapping_rules: Union[List["FileMappingRule"], None, Unset] = UNSET
    # Keys from the API payload that the model does not declare; round-tripped
    # through from_dict()/to_dict() untouched.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict; fields still UNSET are omitted."""
        # Imported locally to avoid circular imports between model modules.
        from ..models.custom_pipeline_settings import CustomPipelineSettings
        from ..models.pipeline_code import PipelineCode

        id = self.id

        name = self.name

        description = self.description

        # Enum serialized by value.
        executor = self.executor.value

        child_process_ids = self.child_process_ids

        parent_process_ids = self.parent_process_ids

        linked_project_ids = self.linked_project_ids

        data_type: Union[None, Unset, str]
        if isinstance(self.data_type, Unset):
            data_type = UNSET
        else:
            data_type = self.data_type

        category = self.category

        documentation_url: Union[None, Unset, str]
        if isinstance(self.documentation_url, Unset):
            documentation_url = UNSET
        else:
            documentation_url = self.documentation_url

        file_requirements_message: Union[None, Unset, str]
        if isinstance(self.file_requirements_message, Unset):
            file_requirements_message = UNSET
        else:
            file_requirements_message = self.file_requirements_message

        # Nested models are serialized recursively via their own to_dict().
        pipeline_code: Union[Dict[str, Any], None, Unset]
        if isinstance(self.pipeline_code, Unset):
            pipeline_code = UNSET
        elif isinstance(self.pipeline_code, PipelineCode):
            pipeline_code = self.pipeline_code.to_dict()
        else:
            pipeline_code = self.pipeline_code

        is_tenant_wide = self.is_tenant_wide

        allow_multiple_sources = self.allow_multiple_sources

        uses_sample_sheet = self.uses_sample_sheet

        custom_settings: Union[Dict[str, Any], None, Unset]
        if isinstance(self.custom_settings, Unset):
            custom_settings = UNSET
        elif isinstance(self.custom_settings, CustomPipelineSettings):
            custom_settings = self.custom_settings.to_dict()
        else:
            custom_settings = self.custom_settings

        file_mapping_rules: Union[List[Dict[str, Any]], None, Unset]
        if isinstance(self.file_mapping_rules, Unset):
            file_mapping_rules = UNSET
        elif isinstance(self.file_mapping_rules, list):
            file_mapping_rules = []
            for file_mapping_rules_type_0_item_data in self.file_mapping_rules:
                file_mapping_rules_type_0_item = file_mapping_rules_type_0_item_data.to_dict()
                file_mapping_rules.append(file_mapping_rules_type_0_item)

        else:
            file_mapping_rules = self.file_mapping_rules

        # Declared fields are written after additional_properties so they take
        # precedence over colliding extra keys; optional fields only appear
        # when not UNSET.
        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
                "description": description,
                "executor": executor,
                "childProcessIds": child_process_ids,
                "parentProcessIds": parent_process_ids,
                "linkedProjectIds": linked_project_ids,
            }
        )
        if data_type is not UNSET:
            field_dict["dataType"] = data_type
        if category is not UNSET:
            field_dict["category"] = category
        if documentation_url is not UNSET:
            field_dict["documentationUrl"] = documentation_url
        if file_requirements_message is not UNSET:
            field_dict["fileRequirementsMessage"] = file_requirements_message
        if pipeline_code is not UNSET:
            field_dict["pipelineCode"] = pipeline_code
        if is_tenant_wide is not UNSET:
            field_dict["isTenantWide"] = is_tenant_wide
        if allow_multiple_sources is not UNSET:
            field_dict["allowMultipleSources"] = allow_multiple_sources
        if uses_sample_sheet is not UNSET:
            field_dict["usesSampleSheet"] = uses_sample_sheet
        if custom_settings is not UNSET:
            field_dict["customSettings"] = custom_settings
        if file_mapping_rules is not UNSET:
            field_dict["fileMappingRules"] = file_mapping_rules

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from an API dict; unrecognized keys are kept in
        ``additional_properties``."""
        # Imported locally to avoid circular imports between model modules.
        from ..models.custom_pipeline_settings import CustomPipelineSettings
        from ..models.file_mapping_rule import FileMappingRule
        from ..models.pipeline_code import PipelineCode

        d = src_dict.copy()
        id = d.pop("id")

        name = d.pop("name")

        description = d.pop("description")

        executor = Executor(d.pop("executor"))

        child_process_ids = cast(List[str], d.pop("childProcessIds"))

        parent_process_ids = cast(List[str], d.pop("parentProcessIds"))

        linked_project_ids = cast(List[str], d.pop("linkedProjectIds"))

        def _parse_data_type(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        data_type = _parse_data_type(d.pop("dataType", UNSET))

        category = d.pop("category", UNSET)

        def _parse_documentation_url(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        documentation_url = _parse_documentation_url(d.pop("documentationUrl", UNSET))

        def _parse_file_requirements_message(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        file_requirements_message = _parse_file_requirements_message(d.pop("fileRequirementsMessage", UNSET))

        def _parse_pipeline_code(data: object) -> Union["PipelineCode", None, Unset]:
            # Dicts are tried as nested models; anything unparseable falls
            # through as the raw value.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                pipeline_code_type_1 = PipelineCode.from_dict(data)

                return pipeline_code_type_1
            except:  # noqa: E722
                pass
            return cast(Union["PipelineCode", None, Unset], data)

        pipeline_code = _parse_pipeline_code(d.pop("pipelineCode", UNSET))

        is_tenant_wide = d.pop("isTenantWide", UNSET)

        allow_multiple_sources = d.pop("allowMultipleSources", UNSET)

        uses_sample_sheet = d.pop("usesSampleSheet", UNSET)

        def _parse_custom_settings(data: object) -> Union["CustomPipelineSettings", None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                custom_settings_type_1 = CustomPipelineSettings.from_dict(data)

                return custom_settings_type_1
            except:  # noqa: E722
                pass
            return cast(Union["CustomPipelineSettings", None, Unset], data)

        custom_settings = _parse_custom_settings(d.pop("customSettings", UNSET))

        def _parse_file_mapping_rules(data: object) -> Union[List["FileMappingRule"], None, Unset]:
            # Lists are parsed element-wise into FileMappingRule models.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                file_mapping_rules_type_0 = []
                _file_mapping_rules_type_0 = data
                for file_mapping_rules_type_0_item_data in _file_mapping_rules_type_0:
                    file_mapping_rules_type_0_item = FileMappingRule.from_dict(file_mapping_rules_type_0_item_data)

                    file_mapping_rules_type_0.append(file_mapping_rules_type_0_item)

                return file_mapping_rules_type_0
            except:  # noqa: E722
                pass
            return cast(Union[List["FileMappingRule"], None, Unset], data)

        file_mapping_rules = _parse_file_mapping_rules(d.pop("fileMappingRules", UNSET))

        custom_process_input = cls(
            id=id,
            name=name,
            description=description,
            executor=executor,
            child_process_ids=child_process_ids,
            parent_process_ids=parent_process_ids,
            linked_project_ids=linked_project_ids,
            data_type=data_type,
            category=category,
            documentation_url=documentation_url,
            file_requirements_message=file_requirements_message,
            pipeline_code=pipeline_code,
            is_tenant_wide=is_tenant_wide,
            allow_multiple_sources=allow_multiple_sources,
            uses_sample_sheet=uses_sample_sheet,
            custom_settings=custom_settings,
            file_mapping_rules=file_mapping_rules,
        )

        # Whatever keys remain after popping the declared ones are preserved.
        custom_process_input.additional_properties = d
        return custom_process_input

    @property
    def additional_keys(self) -> List[str]:
        # Names of the undeclared pass-through keys.
        return list(self.additional_properties.keys())
Attributes:
- id (str): Unique ID of the Process Example: process-hutch-magic_flute-1_0.
- name (str): Friendly name for the process Example: MAGeCK Flute.
- description (str): Description of the process Example: MAGeCK Flute enables accurate identification of essential genes with their related biological functions.
- executor (Executor): How the workflow is executed
- child_process_ids (List[str]): IDs of pipelines that can be run downstream
- parent_process_ids (List[str]): IDs of processes that can run this pipeline
- linked_project_ids (List[str]): Projects that can run this process
- data_type (Union[None, Unset, str]): Name of the data type this pipeline produces (if it is not defined, use the name)
- category (Union[Unset, str]): Category of the process Example: Microbial Analysis.
- documentation_url (Union[None, Unset, str]): Link to process documentation Example: https://docs.cirro.bio/pipelines/catalog_targeted_sequencing/#crispr-screen-analysis.
- file_requirements_message (Union[None, Unset, str]): Description of the files to be uploaded (optional)
- pipeline_code (Union['PipelineCode', None, Unset]):
- is_tenant_wide (Union[Unset, bool]): Whether the process is shared with the tenant
- allow_multiple_sources (Union[Unset, bool]): Whether the pipeline is allowed to have multiple dataset sources
- uses_sample_sheet (Union[Unset, bool]): Whether the pipeline uses the Cirro-provided sample sheet
- custom_settings (Union['CustomPipelineSettings', None, Unset]):
- file_mapping_rules (Union[List['FileMappingRule'], None, Unset]):
40def __init__(self, id, name, description, executor, child_process_ids, parent_process_ids, linked_project_ids, data_type=attr_dict['data_type'].default, category=attr_dict['category'].default, documentation_url=attr_dict['documentation_url'].default, file_requirements_message=attr_dict['file_requirements_message'].default, pipeline_code=attr_dict['pipeline_code'].default, is_tenant_wide=attr_dict['is_tenant_wide'].default, allow_multiple_sources=attr_dict['allow_multiple_sources'].default, uses_sample_sheet=attr_dict['uses_sample_sheet'].default, custom_settings=attr_dict['custom_settings'].default, file_mapping_rules=attr_dict['file_mapping_rules'].default): 41 self.id = id 42 self.name = name 43 self.description = description 44 self.executor = executor 45 self.child_process_ids = child_process_ids 46 self.parent_process_ids = parent_process_ids 47 self.linked_project_ids = linked_project_ids 48 self.data_type = data_type 49 self.category = category 50 self.documentation_url = documentation_url 51 self.file_requirements_message = file_requirements_message 52 self.pipeline_code = pipeline_code 53 self.is_tenant_wide = is_tenant_wide 54 self.allow_multiple_sources = allow_multiple_sources 55 self.uses_sample_sheet = uses_sample_sheet 56 self.custom_settings = custom_settings 57 self.file_mapping_rules = file_mapping_rules 58 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class CustomProcessInput.
64 def to_dict(self) -> Dict[str, Any]: 65 from ..models.custom_pipeline_settings import CustomPipelineSettings 66 from ..models.pipeline_code import PipelineCode 67 68 id = self.id 69 70 name = self.name 71 72 description = self.description 73 74 executor = self.executor.value 75 76 child_process_ids = self.child_process_ids 77 78 parent_process_ids = self.parent_process_ids 79 80 linked_project_ids = self.linked_project_ids 81 82 data_type: Union[None, Unset, str] 83 if isinstance(self.data_type, Unset): 84 data_type = UNSET 85 else: 86 data_type = self.data_type 87 88 category = self.category 89 90 documentation_url: Union[None, Unset, str] 91 if isinstance(self.documentation_url, Unset): 92 documentation_url = UNSET 93 else: 94 documentation_url = self.documentation_url 95 96 file_requirements_message: Union[None, Unset, str] 97 if isinstance(self.file_requirements_message, Unset): 98 file_requirements_message = UNSET 99 else: 100 file_requirements_message = self.file_requirements_message 101 102 pipeline_code: Union[Dict[str, Any], None, Unset] 103 if isinstance(self.pipeline_code, Unset): 104 pipeline_code = UNSET 105 elif isinstance(self.pipeline_code, PipelineCode): 106 pipeline_code = self.pipeline_code.to_dict() 107 else: 108 pipeline_code = self.pipeline_code 109 110 is_tenant_wide = self.is_tenant_wide 111 112 allow_multiple_sources = self.allow_multiple_sources 113 114 uses_sample_sheet = self.uses_sample_sheet 115 116 custom_settings: Union[Dict[str, Any], None, Unset] 117 if isinstance(self.custom_settings, Unset): 118 custom_settings = UNSET 119 elif isinstance(self.custom_settings, CustomPipelineSettings): 120 custom_settings = self.custom_settings.to_dict() 121 else: 122 custom_settings = self.custom_settings 123 124 file_mapping_rules: Union[List[Dict[str, Any]], None, Unset] 125 if isinstance(self.file_mapping_rules, Unset): 126 file_mapping_rules = UNSET 127 elif isinstance(self.file_mapping_rules, list): 128 file_mapping_rules = [] 129 for 
file_mapping_rules_type_0_item_data in self.file_mapping_rules: 130 file_mapping_rules_type_0_item = file_mapping_rules_type_0_item_data.to_dict() 131 file_mapping_rules.append(file_mapping_rules_type_0_item) 132 133 else: 134 file_mapping_rules = self.file_mapping_rules 135 136 field_dict: Dict[str, Any] = {} 137 field_dict.update(self.additional_properties) 138 field_dict.update( 139 { 140 "id": id, 141 "name": name, 142 "description": description, 143 "executor": executor, 144 "childProcessIds": child_process_ids, 145 "parentProcessIds": parent_process_ids, 146 "linkedProjectIds": linked_project_ids, 147 } 148 ) 149 if data_type is not UNSET: 150 field_dict["dataType"] = data_type 151 if category is not UNSET: 152 field_dict["category"] = category 153 if documentation_url is not UNSET: 154 field_dict["documentationUrl"] = documentation_url 155 if file_requirements_message is not UNSET: 156 field_dict["fileRequirementsMessage"] = file_requirements_message 157 if pipeline_code is not UNSET: 158 field_dict["pipelineCode"] = pipeline_code 159 if is_tenant_wide is not UNSET: 160 field_dict["isTenantWide"] = is_tenant_wide 161 if allow_multiple_sources is not UNSET: 162 field_dict["allowMultipleSources"] = allow_multiple_sources 163 if uses_sample_sheet is not UNSET: 164 field_dict["usesSampleSheet"] = uses_sample_sheet 165 if custom_settings is not UNSET: 166 field_dict["customSettings"] = custom_settings 167 if file_mapping_rules is not UNSET: 168 field_dict["fileMappingRules"] = file_mapping_rules 169 170 return field_dict
    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize a CustomProcessInput from its camelCase JSON dict.

        Required keys are popped with ``d.pop(key)`` (raising KeyError when
        missing); optional keys default to the ``UNSET`` sentinel so that
        "absent" and "null" stay distinguishable. Whatever keys remain after
        popping are preserved in ``additional_properties``.
        """
        # Deferred imports avoid circular imports between generated models.
        from ..models.custom_pipeline_settings import CustomPipelineSettings
        from ..models.file_mapping_rule import FileMappingRule
        from ..models.pipeline_code import PipelineCode

        d = src_dict.copy()
        id = d.pop("id")

        name = d.pop("name")

        description = d.pop("description")

        executor = Executor(d.pop("executor"))

        child_process_ids = cast(List[str], d.pop("childProcessIds"))

        parent_process_ids = cast(List[str], d.pop("parentProcessIds"))

        linked_project_ids = cast(List[str], d.pop("linkedProjectIds"))

        # Nullable optional string: None and UNSET pass through unchanged.
        def _parse_data_type(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        data_type = _parse_data_type(d.pop("dataType", UNSET))

        category = d.pop("category", UNSET)

        # Nullable optional string (same pattern as dataType).
        def _parse_documentation_url(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        documentation_url = _parse_documentation_url(d.pop("documentationUrl", UNSET))

        # Nullable optional string (same pattern as dataType).
        def _parse_file_requirements_message(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        file_requirements_message = _parse_file_requirements_message(d.pop("fileRequirementsMessage", UNSET))

        # Nullable optional object: try to parse a dict as PipelineCode;
        # any failure falls through and the raw value is returned as-is.
        def _parse_pipeline_code(data: object) -> Union["PipelineCode", None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                pipeline_code_type_1 = PipelineCode.from_dict(data)

                return pipeline_code_type_1
            except:  # noqa: E722
                pass
            return cast(Union["PipelineCode", None, Unset], data)

        pipeline_code = _parse_pipeline_code(d.pop("pipelineCode", UNSET))

        is_tenant_wide = d.pop("isTenantWide", UNSET)

        allow_multiple_sources = d.pop("allowMultipleSources", UNSET)

        uses_sample_sheet = d.pop("usesSampleSheet", UNSET)

        # Nullable optional object (same pattern as pipelineCode).
        def _parse_custom_settings(data: object) -> Union["CustomPipelineSettings", None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                custom_settings_type_1 = CustomPipelineSettings.from_dict(data)

                return custom_settings_type_1
            except:  # noqa: E722
                pass
            return cast(Union["CustomPipelineSettings", None, Unset], data)

        custom_settings = _parse_custom_settings(d.pop("customSettings", UNSET))

        # Nullable optional list of FileMappingRule objects; non-list or
        # unparsable input is returned unchanged rather than raising.
        def _parse_file_mapping_rules(data: object) -> Union[List["FileMappingRule"], None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                file_mapping_rules_type_0 = []
                _file_mapping_rules_type_0 = data
                for file_mapping_rules_type_0_item_data in _file_mapping_rules_type_0:
                    file_mapping_rules_type_0_item = FileMappingRule.from_dict(file_mapping_rules_type_0_item_data)

                    file_mapping_rules_type_0.append(file_mapping_rules_type_0_item)

                return file_mapping_rules_type_0
            except:  # noqa: E722
                pass
            return cast(Union[List["FileMappingRule"], None, Unset], data)

        file_mapping_rules = _parse_file_mapping_rules(d.pop("fileMappingRules", UNSET))

        custom_process_input = cls(
            id=id,
            name=name,
            description=description,
            executor=executor,
            child_process_ids=child_process_ids,
            parent_process_ids=parent_process_ids,
            linked_project_ids=linked_project_ids,
            data_type=data_type,
            category=category,
            documentation_url=documentation_url,
            file_requirements_message=file_requirements_message,
            pipeline_code=pipeline_code,
            is_tenant_wide=is_tenant_wide,
            allow_multiple_sources=allow_multiple_sources,
            uses_sample_sheet=uses_sample_sheet,
            custom_settings=custom_settings,
            file_mapping_rules=file_mapping_rules,
        )

        # Leftover keys become pass-through additional properties.
        custom_process_input.additional_properties = d
        return custom_process_input
@_attrs_define
class Dashboard:
    """A dashboard record as returned by the API.

    Attributes:
        id (str):
        name (str):
        description (str):
        process_ids (List[str]):
        dashboard_data (DashboardDashboardData):
        info (DashboardInfo):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
    """

    id: str
    name: str
    description: str
    process_ids: List[str]
    dashboard_data: "DashboardDashboardData"
    info: "DashboardInfo"
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict; known camelCase keys overwrite any
        colliding entries carried in ``additional_properties``."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "processIds": self.process_ids,
                "dashboardData": self.dashboard_data.to_dict(),
                "info": self.info.to_dict(),
                "createdBy": self.created_by,
                "createdAt": self.created_at.isoformat(),
                "updatedAt": self.updated_at.isoformat(),
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build a Dashboard from its JSON dict form; unrecognized keys are
        preserved in ``additional_properties``."""
        # Deferred imports avoid circular imports between generated models.
        from ..models.dashboard_dashboard_data import DashboardDashboardData
        from ..models.dashboard_info import DashboardInfo

        payload = src_dict.copy()
        dashboard = cls(
            id=payload.pop("id"),
            name=payload.pop("name"),
            description=payload.pop("description"),
            process_ids=cast(List[str], payload.pop("processIds")),
            dashboard_data=DashboardDashboardData.from_dict(payload.pop("dashboardData")),
            info=DashboardInfo.from_dict(payload.pop("info")),
            created_by=payload.pop("createdBy"),
            created_at=isoparse(payload.pop("createdAt")),
            updated_at=isoparse(payload.pop("updatedAt")),
        )

        dashboard.additional_properties = payload
        return dashboard

    @property
    def additional_keys(self) -> List[str]:
        """Names of the pass-through properties not modeled as attributes."""
        return [*self.additional_properties]
Attributes:
- id (str):
- name (str):
- description (str):
- process_ids (List[str]):
- dashboard_data (DashboardDashboardData):
- info (DashboardInfo):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
32def __init__(self, id, name, description, process_ids, dashboard_data, info, created_by, created_at, updated_at): 33 self.id = id 34 self.name = name 35 self.description = description 36 self.process_ids = process_ids 37 self.dashboard_data = dashboard_data 38 self.info = info 39 self.created_by = created_by 40 self.created_at = created_at 41 self.updated_at = updated_at 42 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Dashboard.
43 def to_dict(self) -> Dict[str, Any]: 44 id = self.id 45 46 name = self.name 47 48 description = self.description 49 50 process_ids = self.process_ids 51 52 dashboard_data = self.dashboard_data.to_dict() 53 54 info = self.info.to_dict() 55 56 created_by = self.created_by 57 58 created_at = self.created_at.isoformat() 59 60 updated_at = self.updated_at.isoformat() 61 62 field_dict: Dict[str, Any] = {} 63 field_dict.update(self.additional_properties) 64 field_dict.update( 65 { 66 "id": id, 67 "name": name, 68 "description": description, 69 "processIds": process_ids, 70 "dashboardData": dashboard_data, 71 "info": info, 72 "createdBy": created_by, 73 "createdAt": created_at, 74 "updatedAt": updated_at, 75 } 76 ) 77 78 return field_dict
80 @classmethod 81 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 82 from ..models.dashboard_dashboard_data import DashboardDashboardData 83 from ..models.dashboard_info import DashboardInfo 84 85 d = src_dict.copy() 86 id = d.pop("id") 87 88 name = d.pop("name") 89 90 description = d.pop("description") 91 92 process_ids = cast(List[str], d.pop("processIds")) 93 94 dashboard_data = DashboardDashboardData.from_dict(d.pop("dashboardData")) 95 96 info = DashboardInfo.from_dict(d.pop("info")) 97 98 created_by = d.pop("createdBy") 99 100 created_at = isoparse(d.pop("createdAt")) 101 102 updated_at = isoparse(d.pop("updatedAt")) 103 104 dashboard = cls( 105 id=id, 106 name=name, 107 description=description, 108 process_ids=process_ids, 109 dashboard_data=dashboard_data, 110 info=info, 111 created_by=created_by, 112 created_at=created_at, 113 updated_at=updated_at, 114 ) 115 116 dashboard.additional_properties = d 117 return dashboard
10@_attrs_define 11class DashboardDashboardData: 12 """ """ 13 14 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 15 16 def to_dict(self) -> Dict[str, Any]: 17 field_dict: Dict[str, Any] = {} 18 field_dict.update(self.additional_properties) 19 field_dict.update({}) 20 21 return field_dict 22 23 @classmethod 24 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 25 d = src_dict.copy() 26 dashboard_dashboard_data = cls() 27 28 dashboard_dashboard_data.additional_properties = d 29 return dashboard_dashboard_data 30 31 @property 32 def additional_keys(self) -> List[str]: 33 return list(self.additional_properties.keys())
10@_attrs_define 11class DashboardInfo: 12 """ """ 13 14 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 15 16 def to_dict(self) -> Dict[str, Any]: 17 field_dict: Dict[str, Any] = {} 18 field_dict.update(self.additional_properties) 19 field_dict.update({}) 20 21 return field_dict 22 23 @classmethod 24 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 25 d = src_dict.copy() 26 dashboard_info = cls() 27 28 dashboard_info.additional_properties = d 29 return dashboard_info 30 31 @property 32 def additional_keys(self) -> List[str]: 33 return list(self.additional_properties.keys())
15@_attrs_define 16class DashboardRequest: 17 """ 18 Attributes: 19 name (str): 20 description (str): 21 process_ids (List[str]): 22 dashboard_data (DashboardRequestDashboardData): 23 info (DashboardRequestInfo): 24 """ 25 26 name: str 27 description: str 28 process_ids: List[str] 29 dashboard_data: "DashboardRequestDashboardData" 30 info: "DashboardRequestInfo" 31 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 32 33 def to_dict(self) -> Dict[str, Any]: 34 name = self.name 35 36 description = self.description 37 38 process_ids = self.process_ids 39 40 dashboard_data = self.dashboard_data.to_dict() 41 42 info = self.info.to_dict() 43 44 field_dict: Dict[str, Any] = {} 45 field_dict.update(self.additional_properties) 46 field_dict.update( 47 { 48 "name": name, 49 "description": description, 50 "processIds": process_ids, 51 "dashboardData": dashboard_data, 52 "info": info, 53 } 54 ) 55 56 return field_dict 57 58 @classmethod 59 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 60 from ..models.dashboard_request_dashboard_data import DashboardRequestDashboardData 61 from ..models.dashboard_request_info import DashboardRequestInfo 62 63 d = src_dict.copy() 64 name = d.pop("name") 65 66 description = d.pop("description") 67 68 process_ids = cast(List[str], d.pop("processIds")) 69 70 dashboard_data = DashboardRequestDashboardData.from_dict(d.pop("dashboardData")) 71 72 info = DashboardRequestInfo.from_dict(d.pop("info")) 73 74 dashboard_request = cls( 75 name=name, 76 description=description, 77 process_ids=process_ids, 78 dashboard_data=dashboard_data, 79 info=info, 80 ) 81 82 dashboard_request.additional_properties = d 83 return dashboard_request 84 85 @property 86 def additional_keys(self) -> List[str]: 87 return list(self.additional_properties.keys())
Attributes:
- name (str):
- description (str):
- process_ids (List[str]):
- dashboard_data (DashboardRequestDashboardData):
- info (DashboardRequestInfo):
28def __init__(self, name, description, process_ids, dashboard_data, info): 29 self.name = name 30 self.description = description 31 self.process_ids = process_ids 32 self.dashboard_data = dashboard_data 33 self.info = info 34 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class DashboardRequest.
33 def to_dict(self) -> Dict[str, Any]: 34 name = self.name 35 36 description = self.description 37 38 process_ids = self.process_ids 39 40 dashboard_data = self.dashboard_data.to_dict() 41 42 info = self.info.to_dict() 43 44 field_dict: Dict[str, Any] = {} 45 field_dict.update(self.additional_properties) 46 field_dict.update( 47 { 48 "name": name, 49 "description": description, 50 "processIds": process_ids, 51 "dashboardData": dashboard_data, 52 "info": info, 53 } 54 ) 55 56 return field_dict
58 @classmethod 59 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 60 from ..models.dashboard_request_dashboard_data import DashboardRequestDashboardData 61 from ..models.dashboard_request_info import DashboardRequestInfo 62 63 d = src_dict.copy() 64 name = d.pop("name") 65 66 description = d.pop("description") 67 68 process_ids = cast(List[str], d.pop("processIds")) 69 70 dashboard_data = DashboardRequestDashboardData.from_dict(d.pop("dashboardData")) 71 72 info = DashboardRequestInfo.from_dict(d.pop("info")) 73 74 dashboard_request = cls( 75 name=name, 76 description=description, 77 process_ids=process_ids, 78 dashboard_data=dashboard_data, 79 info=info, 80 ) 81 82 dashboard_request.additional_properties = d 83 return dashboard_request
10@_attrs_define 11class DashboardRequestDashboardData: 12 """ """ 13 14 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 15 16 def to_dict(self) -> Dict[str, Any]: 17 field_dict: Dict[str, Any] = {} 18 field_dict.update(self.additional_properties) 19 field_dict.update({}) 20 21 return field_dict 22 23 @classmethod 24 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 25 d = src_dict.copy() 26 dashboard_request_dashboard_data = cls() 27 28 dashboard_request_dashboard_data.additional_properties = d 29 return dashboard_request_dashboard_data 30 31 @property 32 def additional_keys(self) -> List[str]: 33 return list(self.additional_properties.keys())
10@_attrs_define 11class DashboardRequestInfo: 12 """ """ 13 14 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 15 16 def to_dict(self) -> Dict[str, Any]: 17 field_dict: Dict[str, Any] = {} 18 field_dict.update(self.additional_properties) 19 field_dict.update({}) 20 21 return field_dict 22 23 @classmethod 24 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 25 d = src_dict.copy() 26 dashboard_request_info = cls() 27 28 dashboard_request_info.additional_properties = d 29 return dashboard_request_info 30 31 @property 32 def additional_keys(self) -> List[str]: 33 return list(self.additional_properties.keys())
14@_attrs_define 15class DataFile: 16 """ 17 Attributes: 18 path (str): 19 metadata (DataFileMetadata): 20 """ 21 22 path: str 23 metadata: "DataFileMetadata" 24 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 25 26 def to_dict(self) -> Dict[str, Any]: 27 path = self.path 28 29 metadata = self.metadata.to_dict() 30 31 field_dict: Dict[str, Any] = {} 32 field_dict.update(self.additional_properties) 33 field_dict.update( 34 { 35 "path": path, 36 "metadata": metadata, 37 } 38 ) 39 40 return field_dict 41 42 @classmethod 43 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 44 from ..models.data_file_metadata import DataFileMetadata 45 46 d = src_dict.copy() 47 path = d.pop("path") 48 49 metadata = DataFileMetadata.from_dict(d.pop("metadata")) 50 51 data_file = cls( 52 path=path, 53 metadata=metadata, 54 ) 55 56 data_file.additional_properties = d 57 return data_file 58 59 @property 60 def additional_keys(self) -> List[str]: 61 return list(self.additional_properties.keys())
Attributes:
- path (str):
- metadata (DataFileMetadata):
25def __init__(self, path, metadata): 26 self.path = path 27 self.metadata = metadata 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class DataFile.
42 @classmethod 43 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 44 from ..models.data_file_metadata import DataFileMetadata 45 46 d = src_dict.copy() 47 path = d.pop("path") 48 49 metadata = DataFileMetadata.from_dict(d.pop("metadata")) 50 51 data_file = cls( 52 path=path, 53 metadata=metadata, 54 ) 55 56 data_file.additional_properties = d 57 return data_file
10@_attrs_define 11class DataFileMetadata: 12 """ """ 13 14 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 15 16 def to_dict(self) -> Dict[str, Any]: 17 field_dict: Dict[str, Any] = {} 18 field_dict.update(self.additional_properties) 19 field_dict.update({}) 20 21 return field_dict 22 23 @classmethod 24 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 25 d = src_dict.copy() 26 data_file_metadata = cls() 27 28 data_file_metadata.additional_properties = d 29 return data_file_metadata 30 31 @property 32 def additional_keys(self) -> List[str]: 33 return list(self.additional_properties.keys())
@_attrs_define
class Dataset:
    """A dataset record as returned by the API.

    Attributes:
        id (str):
        name (str):
        description (str):
        project_id (str):
        process_id (str):
        source_dataset_ids (List[str]):
        status (Status):
        tags (List['Tag']):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
    """

    id: str
    name: str
    description: str
    project_id: str
    process_id: str
    source_dataset_ids: List[str]
    status: Status
    tags: List["Tag"]
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict with camelCase keys.

        Enums are emitted as their ``.value``; datetimes as ISO-8601 strings;
        nested models via their own ``to_dict``. Known keys overwrite any
        colliding entries from ``additional_properties``.
        """
        id = self.id

        name = self.name

        description = self.description

        project_id = self.project_id

        process_id = self.process_id

        source_dataset_ids = self.source_dataset_ids

        status = self.status.value

        tags = []
        for tags_item_data in self.tags:
            tags_item = tags_item_data.to_dict()
            tags.append(tags_item)

        created_by = self.created_by

        created_at = self.created_at.isoformat()

        updated_at = self.updated_at.isoformat()

        field_dict: Dict[str, Any] = {}
        # Pass-through properties first so modeled fields win on collision.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
                "description": description,
                "projectId": project_id,
                "processId": process_id,
                "sourceDatasetIds": source_dataset_ids,
                "status": status,
                "tags": tags,
                "createdBy": created_by,
                "createdAt": created_at,
                "updatedAt": updated_at,
            }
        )

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build a Dataset from its JSON dict form.

        All keys here are required (``pop`` without default raises KeyError
        when absent). Whatever keys remain after popping are preserved in
        ``additional_properties``.
        """
        # Deferred import avoids circular imports between generated models.
        from ..models.tag import Tag

        d = src_dict.copy()
        id = d.pop("id")

        name = d.pop("name")

        description = d.pop("description")

        project_id = d.pop("projectId")

        process_id = d.pop("processId")

        source_dataset_ids = cast(List[str], d.pop("sourceDatasetIds"))

        status = Status(d.pop("status"))

        tags = []
        _tags = d.pop("tags")
        for tags_item_data in _tags:
            tags_item = Tag.from_dict(tags_item_data)

            tags.append(tags_item)

        created_by = d.pop("createdBy")

        created_at = isoparse(d.pop("createdAt"))

        updated_at = isoparse(d.pop("updatedAt"))

        dataset = cls(
            id=id,
            name=name,
            description=description,
            project_id=project_id,
            process_id=process_id,
            source_dataset_ids=source_dataset_ids,
            status=status,
            tags=tags,
            created_by=created_by,
            created_at=created_at,
            updated_at=updated_at,
        )

        # Leftover keys become pass-through additional properties.
        dataset.additional_properties = d
        return dataset

    @property
    def additional_keys(self) -> List[str]:
        """Names of the pass-through properties not modeled as attributes."""
        return list(self.additional_properties.keys())
Attributes:
- id (str):
- name (str):
- description (str):
- project_id (str):
- process_id (str):
- source_dataset_ids (List[str]):
- status (Status):
- tags (List['Tag']):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
34def __init__(self, id, name, description, project_id, process_id, source_dataset_ids, status, tags, created_by, created_at, updated_at): 35 self.id = id 36 self.name = name 37 self.description = description 38 self.project_id = project_id 39 self.process_id = process_id 40 self.source_dataset_ids = source_dataset_ids 41 self.status = status 42 self.tags = tags 43 self.created_by = created_by 44 self.created_at = created_at 45 self.updated_at = updated_at 46 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Dataset.
48 def to_dict(self) -> Dict[str, Any]: 49 id = self.id 50 51 name = self.name 52 53 description = self.description 54 55 project_id = self.project_id 56 57 process_id = self.process_id 58 59 source_dataset_ids = self.source_dataset_ids 60 61 status = self.status.value 62 63 tags = [] 64 for tags_item_data in self.tags: 65 tags_item = tags_item_data.to_dict() 66 tags.append(tags_item) 67 68 created_by = self.created_by 69 70 created_at = self.created_at.isoformat() 71 72 updated_at = self.updated_at.isoformat() 73 74 field_dict: Dict[str, Any] = {} 75 field_dict.update(self.additional_properties) 76 field_dict.update( 77 { 78 "id": id, 79 "name": name, 80 "description": description, 81 "projectId": project_id, 82 "processId": process_id, 83 "sourceDatasetIds": source_dataset_ids, 84 "status": status, 85 "tags": tags, 86 "createdBy": created_by, 87 "createdAt": created_at, 88 "updatedAt": updated_at, 89 } 90 ) 91 92 return field_dict
94 @classmethod 95 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 96 from ..models.tag import Tag 97 98 d = src_dict.copy() 99 id = d.pop("id") 100 101 name = d.pop("name") 102 103 description = d.pop("description") 104 105 project_id = d.pop("projectId") 106 107 process_id = d.pop("processId") 108 109 source_dataset_ids = cast(List[str], d.pop("sourceDatasetIds")) 110 111 status = Status(d.pop("status")) 112 113 tags = [] 114 _tags = d.pop("tags") 115 for tags_item_data in _tags: 116 tags_item = Tag.from_dict(tags_item_data) 117 118 tags.append(tags_item) 119 120 created_by = d.pop("createdBy") 121 122 created_at = isoparse(d.pop("createdAt")) 123 124 updated_at = isoparse(d.pop("updatedAt")) 125 126 dataset = cls( 127 id=id, 128 name=name, 129 description=description, 130 project_id=project_id, 131 process_id=process_id, 132 source_dataset_ids=source_dataset_ids, 133 status=status, 134 tags=tags, 135 created_by=created_by, 136 created_at=created_at, 137 updated_at=updated_at, 138 ) 139 140 dataset.additional_properties = d 141 return dataset
@_attrs_define
class DatasetAssetsManifest:
    """
    Attributes:
        domain (Union[Unset, str]): Base URL for files Example: s3://project-1a1a/datasets/1a1a.
        files (Union[Unset, List['FileEntry']]): List of files in the dataset, including metadata
        total_files (Union[Unset, int]): Total number of files in the dataset, used for pagination
        viz (Union[Unset, List['DatasetViz']]): List of viz to render for the dataset
        tables (Union[Unset, List['Table']]): List of web optimized tables for the dataset
        artifacts (Union[Unset, List['Artifact']]): Artifacts associated with the dataset
    """

    domain: Union[Unset, str] = UNSET
    files: Union[Unset, List["FileEntry"]] = UNSET
    total_files: Union[Unset, int] = UNSET
    viz: Union[Unset, List["DatasetViz"]] = UNSET
    tables: Union[Unset, List["Table"]] = UNSET
    artifacts: Union[Unset, List["Artifact"]] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict, omitting every UNSET field so that
        "not provided" never appears in the output."""
        domain = self.domain

        files: Union[Unset, List[Dict[str, Any]]] = UNSET
        if not isinstance(self.files, Unset):
            files = []
            for files_item_data in self.files:
                files_item = files_item_data.to_dict()
                files.append(files_item)

        total_files = self.total_files

        viz: Union[Unset, List[Dict[str, Any]]] = UNSET
        if not isinstance(self.viz, Unset):
            viz = []
            for viz_item_data in self.viz:
                viz_item = viz_item_data.to_dict()
                viz.append(viz_item)

        tables: Union[Unset, List[Dict[str, Any]]] = UNSET
        if not isinstance(self.tables, Unset):
            tables = []
            for tables_item_data in self.tables:
                tables_item = tables_item_data.to_dict()
                tables.append(tables_item)

        artifacts: Union[Unset, List[Dict[str, Any]]] = UNSET
        if not isinstance(self.artifacts, Unset):
            artifacts = []
            for artifacts_item_data in self.artifacts:
                artifacts_item = artifacts_item_data.to_dict()
                artifacts.append(artifacts_item)

        field_dict: Dict[str, Any] = {}
        # Pass-through properties first so modeled fields win on collision.
        field_dict.update(self.additional_properties)
        field_dict.update({})
        if domain is not UNSET:
            field_dict["domain"] = domain
        if files is not UNSET:
            field_dict["files"] = files
        if total_files is not UNSET:
            field_dict["totalFiles"] = total_files
        if viz is not UNSET:
            field_dict["viz"] = viz
        if tables is not UNSET:
            field_dict["tables"] = tables
        if artifacts is not UNSET:
            field_dict["artifacts"] = artifacts

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build a DatasetAssetsManifest from its JSON dict form.

        NOTE: for the list fields, a missing or falsy key yields an empty
        list here (the ``_x or []`` loops), not UNSET — asymmetric with
        ``to_dict``, which omits UNSET lists entirely.
        """
        # Deferred imports avoid circular imports between generated models.
        from ..models.artifact import Artifact
        from ..models.dataset_viz import DatasetViz
        from ..models.file_entry import FileEntry
        from ..models.table import Table

        d = src_dict.copy()
        domain = d.pop("domain", UNSET)

        files = []
        _files = d.pop("files", UNSET)
        for files_item_data in _files or []:
            files_item = FileEntry.from_dict(files_item_data)

            files.append(files_item)

        total_files = d.pop("totalFiles", UNSET)

        viz = []
        _viz = d.pop("viz", UNSET)
        for viz_item_data in _viz or []:
            viz_item = DatasetViz.from_dict(viz_item_data)

            viz.append(viz_item)

        tables = []
        _tables = d.pop("tables", UNSET)
        for tables_item_data in _tables or []:
            tables_item = Table.from_dict(tables_item_data)

            tables.append(tables_item)

        artifacts = []
        _artifacts = d.pop("artifacts", UNSET)
        for artifacts_item_data in _artifacts or []:
            artifacts_item = Artifact.from_dict(artifacts_item_data)

            artifacts.append(artifacts_item)

        dataset_assets_manifest = cls(
            domain=domain,
            files=files,
            total_files=total_files,
            viz=viz,
            tables=tables,
            artifacts=artifacts,
        )

        # Leftover keys become pass-through additional properties.
        dataset_assets_manifest.additional_properties = d
        return dataset_assets_manifest

    @property
    def additional_keys(self) -> List[str]:
        """Names of the pass-through properties not modeled as attributes."""
        return list(self.additional_properties.keys())
Attributes:
- domain (Union[Unset, str]): Base URL for files Example: s3://project-1a1a/datasets/1a1a.
- files (Union[Unset, List['FileEntry']]): List of files in the dataset, including metadata
- total_files (Union[Unset, int]): Total number of files in the dataset, used for pagination
- viz (Union[Unset, List['DatasetViz']]): List of viz to render for the dataset
- tables (Union[Unset, List['Table']]): List of web optimized tables for the dataset
- artifacts (Union[Unset, List['Artifact']]): Artifacts associated with the dataset
29def __init__(self, domain=attr_dict['domain'].default, files=attr_dict['files'].default, total_files=attr_dict['total_files'].default, viz=attr_dict['viz'].default, tables=attr_dict['tables'].default, artifacts=attr_dict['artifacts'].default): 30 self.domain = domain 31 self.files = files 32 self.total_files = total_files 33 self.viz = viz 34 self.tables = tables 35 self.artifacts = artifacts 36 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class DatasetAssetsManifest.
39 def to_dict(self) -> Dict[str, Any]: 40 domain = self.domain 41 42 files: Union[Unset, List[Dict[str, Any]]] = UNSET 43 if not isinstance(self.files, Unset): 44 files = [] 45 for files_item_data in self.files: 46 files_item = files_item_data.to_dict() 47 files.append(files_item) 48 49 total_files = self.total_files 50 51 viz: Union[Unset, List[Dict[str, Any]]] = UNSET 52 if not isinstance(self.viz, Unset): 53 viz = [] 54 for viz_item_data in self.viz: 55 viz_item = viz_item_data.to_dict() 56 viz.append(viz_item) 57 58 tables: Union[Unset, List[Dict[str, Any]]] = UNSET 59 if not isinstance(self.tables, Unset): 60 tables = [] 61 for tables_item_data in self.tables: 62 tables_item = tables_item_data.to_dict() 63 tables.append(tables_item) 64 65 artifacts: Union[Unset, List[Dict[str, Any]]] = UNSET 66 if not isinstance(self.artifacts, Unset): 67 artifacts = [] 68 for artifacts_item_data in self.artifacts: 69 artifacts_item = artifacts_item_data.to_dict() 70 artifacts.append(artifacts_item) 71 72 field_dict: Dict[str, Any] = {} 73 field_dict.update(self.additional_properties) 74 field_dict.update({}) 75 if domain is not UNSET: 76 field_dict["domain"] = domain 77 if files is not UNSET: 78 field_dict["files"] = files 79 if total_files is not UNSET: 80 field_dict["totalFiles"] = total_files 81 if viz is not UNSET: 82 field_dict["viz"] = viz 83 if tables is not UNSET: 84 field_dict["tables"] = tables 85 if artifacts is not UNSET: 86 field_dict["artifacts"] = artifacts 87 88 return field_dict
90 @classmethod 91 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 92 from ..models.artifact import Artifact 93 from ..models.dataset_viz import DatasetViz 94 from ..models.file_entry import FileEntry 95 from ..models.table import Table 96 97 d = src_dict.copy() 98 domain = d.pop("domain", UNSET) 99 100 files = [] 101 _files = d.pop("files", UNSET) 102 for files_item_data in _files or []: 103 files_item = FileEntry.from_dict(files_item_data) 104 105 files.append(files_item) 106 107 total_files = d.pop("totalFiles", UNSET) 108 109 viz = [] 110 _viz = d.pop("viz", UNSET) 111 for viz_item_data in _viz or []: 112 viz_item = DatasetViz.from_dict(viz_item_data) 113 114 viz.append(viz_item) 115 116 tables = [] 117 _tables = d.pop("tables", UNSET) 118 for tables_item_data in _tables or []: 119 tables_item = Table.from_dict(tables_item_data) 120 121 tables.append(tables_item) 122 123 artifacts = [] 124 _artifacts = d.pop("artifacts", UNSET) 125 for artifacts_item_data in _artifacts or []: 126 artifacts_item = Artifact.from_dict(artifacts_item_data) 127 128 artifacts.append(artifacts_item) 129 130 dataset_assets_manifest = cls( 131 domain=domain, 132 files=files, 133 total_files=total_files, 134 viz=viz, 135 tables=tables, 136 artifacts=artifacts, 137 ) 138 139 dataset_assets_manifest.additional_properties = d 140 return dataset_assets_manifest
12@_attrs_define 13class DatasetCondition: 14 """ 15 Attributes: 16 field (DatasetConditionField): 17 value (str): 18 """ 19 20 field: DatasetConditionField 21 value: str 22 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 23 24 def to_dict(self) -> Dict[str, Any]: 25 field = self.field.value 26 27 value = self.value 28 29 field_dict: Dict[str, Any] = {} 30 field_dict.update(self.additional_properties) 31 field_dict.update( 32 { 33 "field": field, 34 "value": value, 35 } 36 ) 37 38 return field_dict 39 40 @classmethod 41 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 42 d = src_dict.copy() 43 field = DatasetConditionField(d.pop("field")) 44 45 value = d.pop("value") 46 47 dataset_condition = cls( 48 field=field, 49 value=value, 50 ) 51 52 dataset_condition.additional_properties = d 53 return dataset_condition 54 55 @property 56 def additional_keys(self) -> List[str]: 57 return list(self.additional_properties.keys())
Attributes:
- field (DatasetConditionField):
- value (str):
25def __init__(self, field, value): 26 self.field = field 27 self.value = value 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class DatasetCondition.
40 @classmethod 41 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 42 d = src_dict.copy() 43 field = DatasetConditionField(d.pop("field")) 44 45 value = d.pop("value") 46 47 dataset_condition = cls( 48 field=field, 49 value=value, 50 ) 51 52 dataset_condition.additional_properties = d 53 return dataset_condition
5class DatasetConditionField(str, Enum): 6 CREATED_BY = "CREATED_BY" 7 DATASET_ID = "DATASET_ID" 8 PROCESS_ID = "PROCESS_ID" 9 TAG = "TAG" 10 UNKNOWN = "UNKNOWN" 11 """ This is a fallback value for when the value is not known, do not use this value when making requests """ 12 13 def __str__(self) -> str: 14 return str(self.value) 15 16 @classmethod 17 def _missing_(cls, number): 18 return cls(cls.UNKNOWN)
str(object='') -> str
str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class DatasetDetail:
    """
    Attributes:
        id (str):
        name (str):
        description (str):
        s3 (str):
        process_id (str):
        project_id (str):
        source_dataset_ids (List[str]):
        source_datasets (List['NamedItem']):
        source_sample_ids (List[str]):
        source_sample_files_map (DatasetDetailSourceSampleFilesMap): Keys are sampleIds, and the lists are file paths to
            include.
        status (Status):
        status_message (str):
        tags (List['Tag']):
        params (DatasetDetailParams):
        info (DatasetDetailInfo):
        is_view_restricted (bool):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
        originating_project_id (Union[Unset, str]): The originating project ID might be different if the dataset was
            shared from another project.
        share (Union['NamedItem', None, Unset]):
    """

    id: str
    name: str
    description: str
    s3: str
    process_id: str
    project_id: str
    source_dataset_ids: List[str]
    source_datasets: List["NamedItem"]
    source_sample_ids: List[str]
    source_sample_files_map: "DatasetDetailSourceSampleFilesMap"
    status: Status
    status_message: str
    tags: List["Tag"]
    params: "DatasetDetailParams"
    info: "DatasetDetailInfo"
    is_view_restricted: bool
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    originating_project_id: Union[Unset, str] = UNSET
    share: Union["NamedItem", None, Unset] = UNSET
    # Catch-all for response keys not declared in the schema.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this model to a JSON-compatible dict with camelCase keys.

        Nested models are serialized recursively, enums emit their value, and
        datetimes are rendered as ISO-8601 strings. Optional fields are
        included only when they were explicitly set.
        """
        from ..models.named_item import NamedItem

        source_datasets = [item.to_dict() for item in self.source_datasets]
        tags = [tag.to_dict() for tag in self.tags]

        # ``share`` is a three-state field: Unset (omit), None (explicit null),
        # or a NamedItem (serialize).
        share: Union[Dict[str, Any], None, Unset]
        if isinstance(self.share, Unset):
            share = UNSET
        elif isinstance(self.share, NamedItem):
            share = self.share.to_dict()
        else:
            share = self.share

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "s3": self.s3,
                "processId": self.process_id,
                "projectId": self.project_id,
                "sourceDatasetIds": self.source_dataset_ids,
                "sourceDatasets": source_datasets,
                "sourceSampleIds": self.source_sample_ids,
                "sourceSampleFilesMap": self.source_sample_files_map.to_dict(),
                "status": self.status.value,
                "statusMessage": self.status_message,
                "tags": tags,
                "params": self.params.to_dict(),
                "info": self.info.to_dict(),
                "isViewRestricted": self.is_view_restricted,
                "createdBy": self.created_by,
                "createdAt": self.created_at.isoformat(),
                "updatedAt": self.updated_at.isoformat(),
            }
        )
        if self.originating_project_id is not UNSET:
            field_dict["originatingProjectId"] = self.originating_project_id
        if share is not UNSET:
            field_dict["share"] = share

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize a JSON-compatible dict into a ``DatasetDetail``.

        Keys not consumed by the declared fields are preserved verbatim in
        ``additional_properties``.
        """
        from ..models.dataset_detail_info import DatasetDetailInfo
        from ..models.dataset_detail_params import DatasetDetailParams
        from ..models.dataset_detail_source_sample_files_map import DatasetDetailSourceSampleFilesMap
        from ..models.named_item import NamedItem
        from ..models.tag import Tag

        d = src_dict.copy()
        id = d.pop("id")

        name = d.pop("name")

        description = d.pop("description")

        s3 = d.pop("s3")

        process_id = d.pop("processId")

        project_id = d.pop("projectId")

        source_dataset_ids = cast(List[str], d.pop("sourceDatasetIds"))

        source_datasets = [NamedItem.from_dict(item) for item in d.pop("sourceDatasets")]

        source_sample_ids = cast(List[str], d.pop("sourceSampleIds"))

        source_sample_files_map = DatasetDetailSourceSampleFilesMap.from_dict(d.pop("sourceSampleFilesMap"))

        status = Status(d.pop("status"))

        status_message = d.pop("statusMessage")

        tags = [Tag.from_dict(item) for item in d.pop("tags")]

        params = DatasetDetailParams.from_dict(d.pop("params"))

        info = DatasetDetailInfo.from_dict(d.pop("info"))

        is_view_restricted = d.pop("isViewRestricted")

        created_by = d.pop("createdBy")

        created_at = isoparse(d.pop("createdAt"))

        updated_at = isoparse(d.pop("updatedAt"))

        originating_project_id = d.pop("originatingProjectId", UNSET)

        def _parse_share(data: object) -> Union["NamedItem", None, Unset]:
            # Absent (Unset) and explicit null pass through unchanged.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            # Fixed: was a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit; only parse failures should fall
            # through to returning the raw value.
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                return NamedItem.from_dict(data)
            except Exception:
                pass
            return cast(Union["NamedItem", None, Unset], data)

        share = _parse_share(d.pop("share", UNSET))

        dataset_detail = cls(
            id=id,
            name=name,
            description=description,
            s3=s3,
            process_id=process_id,
            project_id=project_id,
            source_dataset_ids=source_dataset_ids,
            source_datasets=source_datasets,
            source_sample_ids=source_sample_ids,
            source_sample_files_map=source_sample_files_map,
            status=status,
            status_message=status_message,
            tags=tags,
            params=params,
            info=info,
            is_view_restricted=is_view_restricted,
            created_by=created_by,
            created_at=created_at,
            updated_at=updated_at,
            originating_project_id=originating_project_id,
            share=share,
        )

        dataset_detail.additional_properties = d
        return dataset_detail

    @property
    def additional_keys(self) -> List[str]:
        """Names of all undeclared properties carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- id (str):
- name (str):
- description (str):
- s3 (str):
- process_id (str):
- project_id (str):
- source_dataset_ids (List[str]):
- source_datasets (List['NamedItem']):
- source_sample_ids (List[str]):
- source_sample_files_map (DatasetDetailSourceSampleFilesMap): Keys are sampleIds, and the lists are file paths to include.
- status (Status):
- status_message (str):
- tags (List['Tag']):
- params (DatasetDetailParams):
- info (DatasetDetailInfo):
- is_view_restricted (bool):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
- originating_project_id (Union[Unset, str]): The originating project ID might be different if the dataset was shared from another project.
- share (Union['NamedItem', None, Unset]):
44def __init__(self, id, name, description, s3, process_id, project_id, source_dataset_ids, source_datasets, source_sample_ids, source_sample_files_map, status, status_message, tags, params, info, is_view_restricted, created_by, created_at, updated_at, originating_project_id=attr_dict['originating_project_id'].default, share=attr_dict['share'].default): 45 self.id = id 46 self.name = name 47 self.description = description 48 self.s3 = s3 49 self.process_id = process_id 50 self.project_id = project_id 51 self.source_dataset_ids = source_dataset_ids 52 self.source_datasets = source_datasets 53 self.source_sample_ids = source_sample_ids 54 self.source_sample_files_map = source_sample_files_map 55 self.status = status 56 self.status_message = status_message 57 self.tags = tags 58 self.params = params 59 self.info = info 60 self.is_view_restricted = is_view_restricted 61 self.created_by = created_by 62 self.created_at = created_at 63 self.updated_at = updated_at 64 self.originating_project_id = originating_project_id 65 self.share = share 66 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class DatasetDetail.
75 def to_dict(self) -> Dict[str, Any]: 76 from ..models.named_item import NamedItem 77 78 id = self.id 79 80 name = self.name 81 82 description = self.description 83 84 s3 = self.s3 85 86 process_id = self.process_id 87 88 project_id = self.project_id 89 90 source_dataset_ids = self.source_dataset_ids 91 92 source_datasets = [] 93 for source_datasets_item_data in self.source_datasets: 94 source_datasets_item = source_datasets_item_data.to_dict() 95 source_datasets.append(source_datasets_item) 96 97 source_sample_ids = self.source_sample_ids 98 99 source_sample_files_map = self.source_sample_files_map.to_dict() 100 101 status = self.status.value 102 103 status_message = self.status_message 104 105 tags = [] 106 for tags_item_data in self.tags: 107 tags_item = tags_item_data.to_dict() 108 tags.append(tags_item) 109 110 params = self.params.to_dict() 111 112 info = self.info.to_dict() 113 114 is_view_restricted = self.is_view_restricted 115 116 created_by = self.created_by 117 118 created_at = self.created_at.isoformat() 119 120 updated_at = self.updated_at.isoformat() 121 122 originating_project_id = self.originating_project_id 123 124 share: Union[Dict[str, Any], None, Unset] 125 if isinstance(self.share, Unset): 126 share = UNSET 127 elif isinstance(self.share, NamedItem): 128 share = self.share.to_dict() 129 else: 130 share = self.share 131 132 field_dict: Dict[str, Any] = {} 133 field_dict.update(self.additional_properties) 134 field_dict.update( 135 { 136 "id": id, 137 "name": name, 138 "description": description, 139 "s3": s3, 140 "processId": process_id, 141 "projectId": project_id, 142 "sourceDatasetIds": source_dataset_ids, 143 "sourceDatasets": source_datasets, 144 "sourceSampleIds": source_sample_ids, 145 "sourceSampleFilesMap": source_sample_files_map, 146 "status": status, 147 "statusMessage": status_message, 148 "tags": tags, 149 "params": params, 150 "info": info, 151 "isViewRestricted": is_view_restricted, 152 "createdBy": created_by, 153 
"createdAt": created_at, 154 "updatedAt": updated_at, 155 } 156 ) 157 if originating_project_id is not UNSET: 158 field_dict["originatingProjectId"] = originating_project_id 159 if share is not UNSET: 160 field_dict["share"] = share 161 162 return field_dict
164 @classmethod 165 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 166 from ..models.dataset_detail_info import DatasetDetailInfo 167 from ..models.dataset_detail_params import DatasetDetailParams 168 from ..models.dataset_detail_source_sample_files_map import DatasetDetailSourceSampleFilesMap 169 from ..models.named_item import NamedItem 170 from ..models.tag import Tag 171 172 d = src_dict.copy() 173 id = d.pop("id") 174 175 name = d.pop("name") 176 177 description = d.pop("description") 178 179 s3 = d.pop("s3") 180 181 process_id = d.pop("processId") 182 183 project_id = d.pop("projectId") 184 185 source_dataset_ids = cast(List[str], d.pop("sourceDatasetIds")) 186 187 source_datasets = [] 188 _source_datasets = d.pop("sourceDatasets") 189 for source_datasets_item_data in _source_datasets: 190 source_datasets_item = NamedItem.from_dict(source_datasets_item_data) 191 192 source_datasets.append(source_datasets_item) 193 194 source_sample_ids = cast(List[str], d.pop("sourceSampleIds")) 195 196 source_sample_files_map = DatasetDetailSourceSampleFilesMap.from_dict(d.pop("sourceSampleFilesMap")) 197 198 status = Status(d.pop("status")) 199 200 status_message = d.pop("statusMessage") 201 202 tags = [] 203 _tags = d.pop("tags") 204 for tags_item_data in _tags: 205 tags_item = Tag.from_dict(tags_item_data) 206 207 tags.append(tags_item) 208 209 params = DatasetDetailParams.from_dict(d.pop("params")) 210 211 info = DatasetDetailInfo.from_dict(d.pop("info")) 212 213 is_view_restricted = d.pop("isViewRestricted") 214 215 created_by = d.pop("createdBy") 216 217 created_at = isoparse(d.pop("createdAt")) 218 219 updated_at = isoparse(d.pop("updatedAt")) 220 221 originating_project_id = d.pop("originatingProjectId", UNSET) 222 223 def _parse_share(data: object) -> Union["NamedItem", None, Unset]: 224 if data is None: 225 return data 226 if isinstance(data, Unset): 227 return data 228 try: 229 if not isinstance(data, dict): 230 raise TypeError() 231 share_type_1 = 
NamedItem.from_dict(data) 232 233 return share_type_1 234 except: # noqa: E722 235 pass 236 return cast(Union["NamedItem", None, Unset], data) 237 238 share = _parse_share(d.pop("share", UNSET)) 239 240 dataset_detail = cls( 241 id=id, 242 name=name, 243 description=description, 244 s3=s3, 245 process_id=process_id, 246 project_id=project_id, 247 source_dataset_ids=source_dataset_ids, 248 source_datasets=source_datasets, 249 source_sample_ids=source_sample_ids, 250 source_sample_files_map=source_sample_files_map, 251 status=status, 252 status_message=status_message, 253 tags=tags, 254 params=params, 255 info=info, 256 is_view_restricted=is_view_restricted, 257 created_by=created_by, 258 created_at=created_at, 259 updated_at=updated_at, 260 originating_project_id=originating_project_id, 261 share=share, 262 ) 263 264 dataset_detail.additional_properties = d 265 return dataset_detail
@_attrs_define
class DatasetDetailInfo:
    """Schema-less info payload; every key is kept as an additional property."""

    # Catch-all store for all key/value pairs of the payload.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict (shallow copy of the stored properties)."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a decoded JSON dict, keeping every key."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of all stored properties."""
        return list(self.additional_properties)
@_attrs_define
class DatasetDetailParams:
    """Schema-less params payload; every key is kept as an additional property."""

    # Catch-all store for all key/value pairs of the payload.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict (shallow copy of the stored properties)."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a decoded JSON dict, keeping every key."""
        instance = cls()
        instance.additional_properties = dict(src_dict)
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of all stored properties."""
        return list(self.additional_properties)
@_attrs_define
class DatasetDetailSourceSampleFilesMap:
    """Keys are sampleIds, and the lists are file paths to include."""

    # Mapping of sampleId -> list of file paths; all entries are undeclared
    # (additional) properties from the API's point of view.
    additional_properties: Dict[str, List[str]] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict of sampleId -> file-path list."""
        return {sample_id: paths for sample_id, paths in self.additional_properties.items()}

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build the mapping from a decoded JSON dict."""
        mapping = cls()
        mapping.additional_properties = {
            sample_id: cast(List[str], paths) for sample_id, paths in src_dict.items()
        }
        return mapping

    @property
    def additional_keys(self) -> List[str]:
        """All sampleIds present in the mapping."""
        return list(self.additional_properties)
Keys are sampleIds, and the lists are file paths to include.
Method generated by attrs for class DatasetDetailSourceSampleFilesMap.
25 @classmethod 26 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 27 d = src_dict.copy() 28 dataset_detail_source_sample_files_map = cls() 29 30 additional_properties = {} 31 for prop_name, prop_dict in d.items(): 32 additional_property = cast(List[str], prop_dict) 33 34 additional_properties[prop_name] = additional_property 35 36 dataset_detail_source_sample_files_map.additional_properties = additional_properties 37 return dataset_detail_source_sample_files_map
@_attrs_define
class DatasetViz:
    """
    Attributes:
        path (Union[Unset, str]): Path to viz configuration, if applicable
        name (Union[Unset, str]): Name of viz
        desc (Union[Unset, str]): Description of viz
        type (Union[Unset, str]): Type of viz Example: vitescce.
        config (Union[Unset, Any]): Config or path to config used to render viz
    """

    path: Union[Unset, str] = UNSET
    name: Union[Unset, str] = UNSET
    desc: Union[Unset, str] = UNSET
    type: Union[Unset, str] = UNSET
    config: Union[Unset, Any] = UNSET
    # Catch-all for response keys not declared in the schema.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a dict; unset optional fields are omitted."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        for key, value in (
            ("path", self.path),
            ("name", self.name),
            ("desc", self.desc),
            ("type", self.type),
            ("config", self.config),
        ):
            if value is not UNSET:
                serialized[key] = value
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize a dict; missing keys become UNSET, extras are retained."""
        remaining = src_dict.copy()
        dataset_viz = cls(
            path=remaining.pop("path", UNSET),
            name=remaining.pop("name", UNSET),
            desc=remaining.pop("desc", UNSET),
            type=remaining.pop("type", UNSET),
            config=remaining.pop("config", UNSET),
        )
        dataset_viz.additional_properties = remaining
        return dataset_viz

    @property
    def additional_keys(self) -> List[str]:
        """Names of all undeclared properties carried by this instance."""
        return list(self.additional_properties)
Attributes:
- path (Union[Unset, str]): Path to viz configuration, if applicable
- name (Union[Unset, str]): Name of viz
- desc (Union[Unset, str]): Description of viz
- type (Union[Unset, str]): Type of viz Example: vitescce.
- config (Union[Unset, Any]): Config or path to config used to render viz
28def __init__(self, path=attr_dict['path'].default, name=attr_dict['name'].default, desc=attr_dict['desc'].default, type=attr_dict['type'].default, config=attr_dict['config'].default): 29 self.path = path 30 self.name = name 31 self.desc = desc 32 self.type = type 33 self.config = config 34 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class DatasetViz.
30 def to_dict(self) -> Dict[str, Any]: 31 path = self.path 32 33 name = self.name 34 35 desc = self.desc 36 37 type = self.type 38 39 config = self.config 40 41 field_dict: Dict[str, Any] = {} 42 field_dict.update(self.additional_properties) 43 field_dict.update({}) 44 if path is not UNSET: 45 field_dict["path"] = path 46 if name is not UNSET: 47 field_dict["name"] = name 48 if desc is not UNSET: 49 field_dict["desc"] = desc 50 if type is not UNSET: 51 field_dict["type"] = type 52 if config is not UNSET: 53 field_dict["config"] = config 54 55 return field_dict
57 @classmethod 58 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 59 d = src_dict.copy() 60 path = d.pop("path", UNSET) 61 62 name = d.pop("name", UNSET) 63 64 desc = d.pop("desc", UNSET) 65 66 type = d.pop("type", UNSET) 67 68 config = d.pop("config", UNSET) 69 70 dataset_viz = cls( 71 path=path, 72 name=name, 73 desc=desc, 74 type=type, 75 config=config, 76 ) 77 78 dataset_viz.additional_properties = d 79 return dataset_viz
@_attrs_define
class Discussion:
    """
    Attributes:
        id (str):
        name (str):
        description (str):
        entity (Entity):
        type (DiscussionType):
        project_id (str):
        created_by (str):
        last_message_time (datetime.datetime):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
    """

    id: str
    name: str
    description: str
    entity: "Entity"
    type: DiscussionType
    project_id: str
    created_by: str
    last_message_time: datetime.datetime
    created_at: datetime.datetime
    updated_at: datetime.datetime
    # Catch-all for response keys not declared in the schema.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a camelCase dict; datetimes become ISO-8601 strings."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "entity": self.entity.to_dict(),
                "type": self.type.value,
                "projectId": self.project_id,
                "createdBy": self.created_by,
                "lastMessageTime": self.last_message_time.isoformat(),
                "createdAt": self.created_at.isoformat(),
                "updatedAt": self.updated_at.isoformat(),
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize a camelCase dict; unconsumed keys are retained."""
        from ..models.entity import Entity

        remaining = src_dict.copy()
        discussion = cls(
            id=remaining.pop("id"),
            name=remaining.pop("name"),
            description=remaining.pop("description"),
            entity=Entity.from_dict(remaining.pop("entity")),
            type=DiscussionType(remaining.pop("type")),
            project_id=remaining.pop("projectId"),
            created_by=remaining.pop("createdBy"),
            last_message_time=isoparse(remaining.pop("lastMessageTime")),
            created_at=isoparse(remaining.pop("createdAt")),
            updated_at=isoparse(remaining.pop("updatedAt")),
        )
        discussion.additional_properties = remaining
        return discussion

    @property
    def additional_keys(self) -> List[str]:
        """Names of all undeclared properties carried by this instance."""
        return list(self.additional_properties)
Attributes:
- id (str):
- name (str):
- description (str):
- entity (Entity):
- type (DiscussionType):
- project_id (str):
- created_by (str):
- last_message_time (datetime.datetime):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
33def __init__(self, id, name, description, entity, type, project_id, created_by, last_message_time, created_at, updated_at): 34 self.id = id 35 self.name = name 36 self.description = description 37 self.entity = entity 38 self.type = type 39 self.project_id = project_id 40 self.created_by = created_by 41 self.last_message_time = last_message_time 42 self.created_at = created_at 43 self.updated_at = updated_at 44 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Discussion.
46 def to_dict(self) -> Dict[str, Any]: 47 id = self.id 48 49 name = self.name 50 51 description = self.description 52 53 entity = self.entity.to_dict() 54 55 type = self.type.value 56 57 project_id = self.project_id 58 59 created_by = self.created_by 60 61 last_message_time = self.last_message_time.isoformat() 62 63 created_at = self.created_at.isoformat() 64 65 updated_at = self.updated_at.isoformat() 66 67 field_dict: Dict[str, Any] = {} 68 field_dict.update(self.additional_properties) 69 field_dict.update( 70 { 71 "id": id, 72 "name": name, 73 "description": description, 74 "entity": entity, 75 "type": type, 76 "projectId": project_id, 77 "createdBy": created_by, 78 "lastMessageTime": last_message_time, 79 "createdAt": created_at, 80 "updatedAt": updated_at, 81 } 82 ) 83 84 return field_dict
86 @classmethod 87 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 88 from ..models.entity import Entity 89 90 d = src_dict.copy() 91 id = d.pop("id") 92 93 name = d.pop("name") 94 95 description = d.pop("description") 96 97 entity = Entity.from_dict(d.pop("entity")) 98 99 type = DiscussionType(d.pop("type")) 100 101 project_id = d.pop("projectId") 102 103 created_by = d.pop("createdBy") 104 105 last_message_time = isoparse(d.pop("lastMessageTime")) 106 107 created_at = isoparse(d.pop("createdAt")) 108 109 updated_at = isoparse(d.pop("updatedAt")) 110 111 discussion = cls( 112 id=id, 113 name=name, 114 description=description, 115 entity=entity, 116 type=type, 117 project_id=project_id, 118 created_by=created_by, 119 last_message_time=last_message_time, 120 created_at=created_at, 121 updated_at=updated_at, 122 ) 123 124 discussion.additional_properties = d 125 return discussion
@_attrs_define
class DiscussionInput:
    """
    Attributes:
        name (str):
        description (str):
        entity (Entity):
        type (DiscussionType):
        project_id (str):
    """

    name: str
    description: str
    entity: "Entity"
    type: DiscussionType
    project_id: str
    # Catch-all for keys not declared in the schema.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a camelCase dict suitable for a request body."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "name": self.name,
                "description": self.description,
                "entity": self.entity.to_dict(),
                "type": self.type.value,
                "projectId": self.project_id,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize a camelCase dict; unconsumed keys are retained."""
        from ..models.entity import Entity

        remaining = src_dict.copy()
        discussion_input = cls(
            name=remaining.pop("name"),
            description=remaining.pop("description"),
            entity=Entity.from_dict(remaining.pop("entity")),
            type=DiscussionType(remaining.pop("type")),
            project_id=remaining.pop("projectId"),
        )
        discussion_input.additional_properties = remaining
        return discussion_input

    @property
    def additional_keys(self) -> List[str]:
        """Names of all undeclared properties carried by this instance."""
        return list(self.additional_properties)
Attributes:
- name (str):
- description (str):
- entity (Entity):
- type (DiscussionType):
- project_id (str):
28def __init__(self, name, description, entity, type, project_id): 29 self.name = name 30 self.description = description 31 self.entity = entity 32 self.type = type 33 self.project_id = project_id 34 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class DiscussionInput.
34 def to_dict(self) -> Dict[str, Any]: 35 name = self.name 36 37 description = self.description 38 39 entity = self.entity.to_dict() 40 41 type = self.type.value 42 43 project_id = self.project_id 44 45 field_dict: Dict[str, Any] = {} 46 field_dict.update(self.additional_properties) 47 field_dict.update( 48 { 49 "name": name, 50 "description": description, 51 "entity": entity, 52 "type": type, 53 "projectId": project_id, 54 } 55 ) 56 57 return field_dict
59 @classmethod 60 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 61 from ..models.entity import Entity 62 63 d = src_dict.copy() 64 name = d.pop("name") 65 66 description = d.pop("description") 67 68 entity = Entity.from_dict(d.pop("entity")) 69 70 type = DiscussionType(d.pop("type")) 71 72 project_id = d.pop("projectId") 73 74 discussion_input = cls( 75 name=name, 76 description=description, 77 entity=entity, 78 type=type, 79 project_id=project_id, 80 ) 81 82 discussion_input.additional_properties = d 83 return discussion_input
class DiscussionType(str, Enum):
    """Kind of a discussion thread."""

    DISCUSSION = "DISCUSSION"
    NOTES = "NOTES"
    # Fallback for values this client doesn't know; never send it in requests.
    UNKNOWN = "UNKNOWN"

    def __str__(self) -> str:
        return self.value

    @classmethod
    def _missing_(cls, value):
        # Any unrecognized wire value maps onto the UNKNOWN sentinel.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class Entity:
    """
    Attributes:
        type (EntityType):
        id (str):
    """

    type: EntityType
    id: str
    # Catch-all for keys not declared in the schema.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a dict; the enum emits its string value."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "type": self.type.value,
                "id": self.id,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize a dict; unconsumed keys are retained."""
        remaining = src_dict.copy()
        entity = cls(
            type=EntityType(remaining.pop("type")),
            id=remaining.pop("id"),
        )
        entity.additional_properties = remaining
        return entity

    @property
    def additional_keys(self) -> List[str]:
        """Names of all undeclared properties carried by this instance."""
        return list(self.additional_properties)
Attributes:
- type (EntityType):
- id (str):
25def __init__(self, type, id): 26 self.type = type 27 self.id = id 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Entity.
class EntityType(str, Enum):
    """Type tag identifying what an Entity reference points at."""

    DATASET = "DATASET"
    DISCUSSION = "DISCUSSION"
    NOTEBOOK = "NOTEBOOK"
    PROCESS = "PROCESS"
    PROJECT = "PROJECT"
    REFERENCE = "REFERENCE"
    SAMPLE = "SAMPLE"
    SHARE = "SHARE"
    TAG = "TAG"
    UNKNOWN = "UNKNOWN"
    USER = "USER"

    def __str__(self) -> str:
        return self.value

    @classmethod
    def _missing_(cls, value):
        # Any unrecognized wire value maps onto the UNKNOWN sentinel.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
class EnvironmentType(str, Enum):
    """Compute environment backing a pipeline execution."""

    AWS_BATCH = "AWS_BATCH"
    AWS_OMICS = "AWS_OMICS"
    LOCAL_AGENT = "LOCAL_AGENT"
    NONE = "NONE"
    # Fallback for values this client doesn't know; never send it in requests.
    UNKNOWN = "UNKNOWN"

    def __str__(self) -> str:
        return self.value

    @classmethod
    def _missing_(cls, value):
        # Any unrecognized wire value maps onto the UNKNOWN sentinel.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class ErrorMessage:
    """
    Attributes:
        message (str):
    """

    message: str
    # Catch-all for keys not declared in the schema.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a dict containing the message plus extra properties."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized["message"] = self.message
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize a dict; unconsumed keys are retained."""
        remaining = src_dict.copy()
        error_message = cls(message=remaining.pop("message"))
        error_message.additional_properties = remaining
        return error_message

    @property
    def additional_keys(self) -> List[str]:
        """Names of all undeclared properties carried by this instance."""
        return list(self.additional_properties)
Attributes:
- message (str):
24def __init__(self, message): 25 self.message = message 26 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ErrorMessage.
class Executor(str, Enum):
    """Workflow engine used to run a process."""

    CROMWELL = "CROMWELL"
    INGEST = "INGEST"
    NEXTFLOW = "NEXTFLOW"
    OMICS_READY2RUN = "OMICS_READY2RUN"
    # Fallback for values this client doesn't know; never send it in requests.
    UNKNOWN = "UNKNOWN"

    def __str__(self) -> str:
        return self.value

    @classmethod
    def _missing_(cls, value):
        # Any unrecognized wire value maps onto the UNKNOWN sentinel.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class FeatureFlags:
    """
    Attributes:
        sftp_enabled (bool):
        governance_enabled (bool):
        project_requests_enabled (bool):
    """

    sftp_enabled: bool
    governance_enabled: bool
    project_requests_enabled: bool
    # Catch-all for keys not declared in the schema.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a camelCase dict."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "sftpEnabled": self.sftp_enabled,
                "governanceEnabled": self.governance_enabled,
                "projectRequestsEnabled": self.project_requests_enabled,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize a camelCase dict; unconsumed keys are retained."""
        remaining = src_dict.copy()
        feature_flags = cls(
            sftp_enabled=remaining.pop("sftpEnabled"),
            governance_enabled=remaining.pop("governanceEnabled"),
            project_requests_enabled=remaining.pop("projectRequestsEnabled"),
        )
        feature_flags.additional_properties = remaining
        return feature_flags

    @property
    def additional_keys(self) -> List[str]:
        """Names of all undeclared properties carried by this instance."""
        return list(self.additional_properties)
Attributes:
- sftp_enabled (bool):
- governance_enabled (bool):
- project_requests_enabled (bool):
26def __init__(self, sftp_enabled, governance_enabled, project_requests_enabled): 27 self.sftp_enabled = sftp_enabled 28 self.governance_enabled = governance_enabled 29 self.project_requests_enabled = project_requests_enabled 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FeatureFlags.
24 def to_dict(self) -> Dict[str, Any]: 25 sftp_enabled = self.sftp_enabled 26 27 governance_enabled = self.governance_enabled 28 29 project_requests_enabled = self.project_requests_enabled 30 31 field_dict: Dict[str, Any] = {} 32 field_dict.update(self.additional_properties) 33 field_dict.update( 34 { 35 "sftpEnabled": sftp_enabled, 36 "governanceEnabled": governance_enabled, 37 "projectRequestsEnabled": project_requests_enabled, 38 } 39 ) 40 41 return field_dict
43 @classmethod 44 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 45 d = src_dict.copy() 46 sftp_enabled = d.pop("sftpEnabled") 47 48 governance_enabled = d.pop("governanceEnabled") 49 50 project_requests_enabled = d.pop("projectRequestsEnabled") 51 52 feature_flags = cls( 53 sftp_enabled=sftp_enabled, 54 governance_enabled=governance_enabled, 55 project_requests_enabled=project_requests_enabled, 56 ) 57 58 feature_flags.additional_properties = d 59 return feature_flags
@_attrs_define
class FileEntry:
    """
    Attributes:
        path (Union[Unset, str]): Relative path to file Example: data/fastq/SRX12875516_SRR16674827_1.fastq.gz.
        size (Union[Unset, int]): File size (in bytes) Example: 1435658507.
        metadata (Union[Unset, FileEntryMetadata]): Metadata associated with the file Example: {'read': 1}.
    """

    # All fields are optional on the wire; UNSET marks "absent", distinct from None.
    path: Union[Unset, str] = UNSET
    size: Union[Unset, int] = UNSET
    metadata: Union[Unset, "FileEntryMetadata"] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        # Serialize to the camelCase wire format, omitting fields that are UNSET.
        path = self.path

        size = self.size

        # The nested model is flattened to a plain dict only when present.
        metadata: Union[Unset, Dict[str, Any]] = UNSET
        if not isinstance(self.metadata, Unset):
            metadata = self.metadata.to_dict()

        field_dict: Dict[str, Any] = {}
        # Extras go in first so declared fields win on key collisions.
        field_dict.update(self.additional_properties)
        field_dict.update({})
        if path is not UNSET:
            field_dict["path"] = path
        if size is not UNSET:
            field_dict["size"] = size
        if metadata is not UNSET:
            field_dict["metadata"] = metadata

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        # Deferred import — presumably to avoid circular imports between models.
        from ..models.file_entry_metadata import FileEntryMetadata

        d = src_dict.copy()
        path = d.pop("path", UNSET)

        size = d.pop("size", UNSET)

        # Tri-state: absent stays UNSET; present values are parsed into the model.
        _metadata = d.pop("metadata", UNSET)
        metadata: Union[Unset, FileEntryMetadata]
        if isinstance(_metadata, Unset):
            metadata = UNSET
        else:
            metadata = FileEntryMetadata.from_dict(_metadata)

        file_entry = cls(
            path=path,
            size=size,
            metadata=metadata,
        )

        # Keys not consumed above are preserved as additional properties.
        file_entry.additional_properties = d
        return file_entry

    @property
    def additional_keys(self) -> List[str]:
        # Names of properties beyond the declared schema fields.
        return list(self.additional_properties.keys())
Attributes:
- path (Union[Unset, str]): Relative path to file Example: data/fastq/SRX12875516_SRR16674827_1.fastq.gz.
- size (Union[Unset, int]): File size (in bytes) Example: 1435658507.
- metadata (Union[Unset, FileEntryMetadata]): Metadata associated with the file Example: {'read': 1}.
26def __init__(self, path=attr_dict['path'].default, size=attr_dict['size'].default, metadata=attr_dict['metadata'].default): 27 self.path = path 28 self.size = size 29 self.metadata = metadata 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FileEntry.
30 def to_dict(self) -> Dict[str, Any]: 31 path = self.path 32 33 size = self.size 34 35 metadata: Union[Unset, Dict[str, Any]] = UNSET 36 if not isinstance(self.metadata, Unset): 37 metadata = self.metadata.to_dict() 38 39 field_dict: Dict[str, Any] = {} 40 field_dict.update(self.additional_properties) 41 field_dict.update({}) 42 if path is not UNSET: 43 field_dict["path"] = path 44 if size is not UNSET: 45 field_dict["size"] = size 46 if metadata is not UNSET: 47 field_dict["metadata"] = metadata 48 49 return field_dict
51 @classmethod 52 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 53 from ..models.file_entry_metadata import FileEntryMetadata 54 55 d = src_dict.copy() 56 path = d.pop("path", UNSET) 57 58 size = d.pop("size", UNSET) 59 60 _metadata = d.pop("metadata", UNSET) 61 metadata: Union[Unset, FileEntryMetadata] 62 if isinstance(_metadata, Unset): 63 metadata = UNSET 64 else: 65 metadata = FileEntryMetadata.from_dict(_metadata) 66 67 file_entry = cls( 68 path=path, 69 size=size, 70 metadata=metadata, 71 ) 72 73 file_entry.additional_properties = d 74 return file_entry
@_attrs_define
class FileEntryMetadata:
    """Metadata associated with the file

    Example:
        {'read': 1}

    """

    additional_properties: Dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        # There are no declared fields; every stored property is emitted verbatim.
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        file_entry_metadata = cls()
        file_entry_metadata.additional_properties = dict(src_dict)
        return file_entry_metadata

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties)
Metadata associated with the file
Example:
{'read': 1}
@_attrs_define
class FileMappingRule:
    """
    Attributes:
        description (str): Describes the group of possible files that meet a single file type criteria.
        file_name_patterns (List['FileNamePattern']): Describes the possible file patterns to expect for the file type
            group.
        min_ (Union[Unset, int]): Minimum number of files to expect for the file type group.
        max_ (Union[Unset, int]): Maximum number of files to expect for the file type group.
        is_sample (Union[Unset, bool]): Specifies if the file type will be associated with a sample.
    """

    description: str
    file_name_patterns: List["FileNamePattern"]
    # min_/max_ carry trailing underscores to avoid shadowing the builtins min/max.
    min_: Union[Unset, int] = UNSET
    max_: Union[Unset, int] = UNSET
    is_sample: Union[Unset, bool] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        # Serialize to the camelCase wire format, omitting fields that are UNSET.
        description = self.description

        # Nested patterns are flattened to plain dicts.
        file_name_patterns = []
        for file_name_patterns_item_data in self.file_name_patterns:
            file_name_patterns_item = file_name_patterns_item_data.to_dict()
            file_name_patterns.append(file_name_patterns_item)

        min_ = self.min_

        max_ = self.max_

        is_sample = self.is_sample

        field_dict: Dict[str, Any] = {}
        # Extras go in first so declared fields win on key collisions.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "description": description,
                "fileNamePatterns": file_name_patterns,
            }
        )
        if min_ is not UNSET:
            field_dict["min"] = min_
        if max_ is not UNSET:
            field_dict["max"] = max_
        if is_sample is not UNSET:
            field_dict["isSample"] = is_sample

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        # Deferred import — presumably to avoid circular imports between models.
        from ..models.file_name_pattern import FileNamePattern

        d = src_dict.copy()
        description = d.pop("description")

        file_name_patterns = []
        _file_name_patterns = d.pop("fileNamePatterns")
        for file_name_patterns_item_data in _file_name_patterns:
            file_name_patterns_item = FileNamePattern.from_dict(file_name_patterns_item_data)

            file_name_patterns.append(file_name_patterns_item)

        min_ = d.pop("min", UNSET)

        max_ = d.pop("max", UNSET)

        is_sample = d.pop("isSample", UNSET)

        file_mapping_rule = cls(
            description=description,
            file_name_patterns=file_name_patterns,
            min_=min_,
            max_=max_,
            is_sample=is_sample,
        )

        # Keys not consumed above are preserved as additional properties.
        file_mapping_rule.additional_properties = d
        return file_mapping_rule

    @property
    def additional_keys(self) -> List[str]:
        # Names of properties beyond the declared schema fields.
        return list(self.additional_properties.keys())
Attributes:
- description (str): Describes the group of possible files that meet a single file type criteria.
- file_name_patterns (List['FileNamePattern']): Describes the possible file patterns to expect for the file type group.
- min_ (Union[Unset, int]): Minimum number of files to expect for the file type group.
- max_ (Union[Unset, int]): Maximum number of files to expect for the file type group.
- is_sample (Union[Unset, bool]): Specifies if the file type will be associated with a sample.
28def __init__(self, description, file_name_patterns, min_=attr_dict['min_'].default, max_=attr_dict['max_'].default, is_sample=attr_dict['is_sample'].default): 29 self.description = description 30 self.file_name_patterns = file_name_patterns 31 self.min_ = min_ 32 self.max_ = max_ 33 self.is_sample = is_sample 34 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FileMappingRule.
35 def to_dict(self) -> Dict[str, Any]: 36 description = self.description 37 38 file_name_patterns = [] 39 for file_name_patterns_item_data in self.file_name_patterns: 40 file_name_patterns_item = file_name_patterns_item_data.to_dict() 41 file_name_patterns.append(file_name_patterns_item) 42 43 min_ = self.min_ 44 45 max_ = self.max_ 46 47 is_sample = self.is_sample 48 49 field_dict: Dict[str, Any] = {} 50 field_dict.update(self.additional_properties) 51 field_dict.update( 52 { 53 "description": description, 54 "fileNamePatterns": file_name_patterns, 55 } 56 ) 57 if min_ is not UNSET: 58 field_dict["min"] = min_ 59 if max_ is not UNSET: 60 field_dict["max"] = max_ 61 if is_sample is not UNSET: 62 field_dict["isSample"] = is_sample 63 64 return field_dict
66 @classmethod 67 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 68 from ..models.file_name_pattern import FileNamePattern 69 70 d = src_dict.copy() 71 description = d.pop("description") 72 73 file_name_patterns = [] 74 _file_name_patterns = d.pop("fileNamePatterns") 75 for file_name_patterns_item_data in _file_name_patterns: 76 file_name_patterns_item = FileNamePattern.from_dict(file_name_patterns_item_data) 77 78 file_name_patterns.append(file_name_patterns_item) 79 80 min_ = d.pop("min", UNSET) 81 82 max_ = d.pop("max", UNSET) 83 84 is_sample = d.pop("isSample", UNSET) 85 86 file_mapping_rule = cls( 87 description=description, 88 file_name_patterns=file_name_patterns, 89 min_=min_, 90 max_=max_, 91 is_sample=is_sample, 92 ) 93 94 file_mapping_rule.additional_properties = d 95 return file_mapping_rule
@_attrs_define
class FileNameMatch:
    """
    Attributes:
        file_name (str):
        sample_name (str):
        regex_pattern_match (str):
    """

    file_name: str
    sample_name: str
    regex_pattern_match: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        # Extras are copied first so the declared fields win on key collisions.
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized["fileName"] = self.file_name
        serialized["sampleName"] = self.sample_name
        serialized["regexPatternMatch"] = self.regex_pattern_match
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        remaining = dict(src_dict)
        file_name_match = cls(
            file_name=remaining.pop("fileName"),
            sample_name=remaining.pop("sampleName"),
            regex_pattern_match=remaining.pop("regexPatternMatch"),
        )
        # Keys not consumed above are preserved as additional properties.
        file_name_match.additional_properties = remaining
        return file_name_match

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties)
Attributes:
- file_name (str):
- sample_name (str):
- regex_pattern_match (str):
26def __init__(self, file_name, sample_name, regex_pattern_match): 27 self.file_name = file_name 28 self.sample_name = sample_name 29 self.regex_pattern_match = regex_pattern_match 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FileNameMatch.
24 def to_dict(self) -> Dict[str, Any]: 25 file_name = self.file_name 26 27 sample_name = self.sample_name 28 29 regex_pattern_match = self.regex_pattern_match 30 31 field_dict: Dict[str, Any] = {} 32 field_dict.update(self.additional_properties) 33 field_dict.update( 34 { 35 "fileName": file_name, 36 "sampleName": sample_name, 37 "regexPatternMatch": regex_pattern_match, 38 } 39 ) 40 41 return field_dict
43 @classmethod 44 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 45 d = src_dict.copy() 46 file_name = d.pop("fileName") 47 48 sample_name = d.pop("sampleName") 49 50 regex_pattern_match = d.pop("regexPatternMatch") 51 52 file_name_match = cls( 53 file_name=file_name, 54 sample_name=sample_name, 55 regex_pattern_match=regex_pattern_match, 56 ) 57 58 file_name_match.additional_properties = d 59 return file_name_match
@_attrs_define
class FileNamePattern:
    """
    Attributes:
        example_name (str): User-readable name for the file type used for display.
        sample_matching_pattern (str): File name pattern, formatted as a valid regex, to extract sample name and other
            metadata.
        description (Union[None, Unset, str]): File description.
    """

    example_name: str
    sample_matching_pattern: str
    # Tri-state: UNSET (absent), None (explicit null), or a string value.
    description: Union[None, Unset, str] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        # Serialize to the camelCase wire format; description is emitted only when
        # not UNSET (an explicit None is still emitted).
        example_name = self.example_name

        sample_matching_pattern = self.sample_matching_pattern

        description: Union[None, Unset, str]
        if isinstance(self.description, Unset):
            description = UNSET
        else:
            description = self.description

        field_dict: Dict[str, Any] = {}
        # Extras go in first so declared fields win on key collisions.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "exampleName": example_name,
                "sampleMatchingPattern": sample_matching_pattern,
            }
        )
        if description is not UNSET:
            field_dict["description"] = description

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        d = src_dict.copy()
        example_name = d.pop("exampleName")

        sample_matching_pattern = d.pop("sampleMatchingPattern")

        def _parse_description(data: object) -> Union[None, Unset, str]:
            # Preserves the null-vs-absent distinction from the payload.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        description = _parse_description(d.pop("description", UNSET))

        file_name_pattern = cls(
            example_name=example_name,
            sample_matching_pattern=sample_matching_pattern,
            description=description,
        )

        # Keys not consumed above are preserved as additional properties.
        file_name_pattern.additional_properties = d
        return file_name_pattern

    @property
    def additional_keys(self) -> List[str]:
        # Names of properties beyond the declared schema fields.
        return list(self.additional_properties.keys())
Attributes:
- example_name (str): User-readable name for the file type used for display.
- sample_matching_pattern (str): File name pattern, formatted as a valid regex, to extract sample name and other metadata.
- description (Union[None, Unset, str]): File description.
26def __init__(self, example_name, sample_matching_pattern, description=attr_dict['description'].default): 27 self.example_name = example_name 28 self.sample_matching_pattern = sample_matching_pattern 29 self.description = description 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FileNamePattern.
27 def to_dict(self) -> Dict[str, Any]: 28 example_name = self.example_name 29 30 sample_matching_pattern = self.sample_matching_pattern 31 32 description: Union[None, Unset, str] 33 if isinstance(self.description, Unset): 34 description = UNSET 35 else: 36 description = self.description 37 38 field_dict: Dict[str, Any] = {} 39 field_dict.update(self.additional_properties) 40 field_dict.update( 41 { 42 "exampleName": example_name, 43 "sampleMatchingPattern": sample_matching_pattern, 44 } 45 ) 46 if description is not UNSET: 47 field_dict["description"] = description 48 49 return field_dict
51 @classmethod 52 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 53 d = src_dict.copy() 54 example_name = d.pop("exampleName") 55 56 sample_matching_pattern = d.pop("sampleMatchingPattern") 57 58 def _parse_description(data: object) -> Union[None, Unset, str]: 59 if data is None: 60 return data 61 if isinstance(data, Unset): 62 return data 63 return cast(Union[None, Unset, str], data) 64 65 description = _parse_description(d.pop("description", UNSET)) 66 67 file_name_pattern = cls( 68 example_name=example_name, 69 sample_matching_pattern=sample_matching_pattern, 70 description=description, 71 ) 72 73 file_name_pattern.additional_properties = d 74 return file_name_pattern
@_attrs_define
class FileRequirements:
    """
    Attributes:
        files (List[str]):
        error_msg (str):
        allowed_data_types (List['AllowedDataType']):
    """

    files: List[str]
    error_msg: str
    allowed_data_types: List["AllowedDataType"]
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        # Serialize to the camelCase wire format; all three fields are required.
        files = self.files

        error_msg = self.error_msg

        # Nested models are flattened to plain dicts.
        allowed_data_types = []
        for allowed_data_types_item_data in self.allowed_data_types:
            allowed_data_types_item = allowed_data_types_item_data.to_dict()
            allowed_data_types.append(allowed_data_types_item)

        field_dict: Dict[str, Any] = {}
        # Extras go in first so declared fields win on key collisions.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "files": files,
                "errorMsg": error_msg,
                "allowedDataTypes": allowed_data_types,
            }
        )

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        # Deferred import — presumably to avoid circular imports between models.
        from ..models.allowed_data_type import AllowedDataType

        d = src_dict.copy()
        files = cast(List[str], d.pop("files"))

        error_msg = d.pop("errorMsg")

        allowed_data_types = []
        _allowed_data_types = d.pop("allowedDataTypes")
        for allowed_data_types_item_data in _allowed_data_types:
            allowed_data_types_item = AllowedDataType.from_dict(allowed_data_types_item_data)

            allowed_data_types.append(allowed_data_types_item)

        file_requirements = cls(
            files=files,
            error_msg=error_msg,
            allowed_data_types=allowed_data_types,
        )

        # Keys not consumed above are preserved as additional properties.
        file_requirements.additional_properties = d
        return file_requirements

    @property
    def additional_keys(self) -> List[str]:
        # Names of properties beyond the declared schema fields.
        return list(self.additional_properties.keys())
Attributes:
- files (List[str]):
- error_msg (str):
- allowed_data_types (List['AllowedDataType']):
26def __init__(self, files, error_msg, allowed_data_types): 27 self.files = files 28 self.error_msg = error_msg 29 self.allowed_data_types = allowed_data_types 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FileRequirements.
28 def to_dict(self) -> Dict[str, Any]: 29 files = self.files 30 31 error_msg = self.error_msg 32 33 allowed_data_types = [] 34 for allowed_data_types_item_data in self.allowed_data_types: 35 allowed_data_types_item = allowed_data_types_item_data.to_dict() 36 allowed_data_types.append(allowed_data_types_item) 37 38 field_dict: Dict[str, Any] = {} 39 field_dict.update(self.additional_properties) 40 field_dict.update( 41 { 42 "files": files, 43 "errorMsg": error_msg, 44 "allowedDataTypes": allowed_data_types, 45 } 46 ) 47 48 return field_dict
50 @classmethod 51 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 52 from ..models.allowed_data_type import AllowedDataType 53 54 d = src_dict.copy() 55 files = cast(List[str], d.pop("files")) 56 57 error_msg = d.pop("errorMsg") 58 59 allowed_data_types = [] 60 _allowed_data_types = d.pop("allowedDataTypes") 61 for allowed_data_types_item_data in _allowed_data_types: 62 allowed_data_types_item = AllowedDataType.from_dict(allowed_data_types_item_data) 63 64 allowed_data_types.append(allowed_data_types_item) 65 66 file_requirements = cls( 67 files=files, 68 error_msg=error_msg, 69 allowed_data_types=allowed_data_types, 70 ) 71 72 file_requirements.additional_properties = d 73 return file_requirements
@_attrs_define
class FormSchema:
    """
    Attributes:
        form (Union[Unset, FormSchemaForm]): JSONSchema representation of the parameters
        ui (Union[Unset, FormSchemaUi]): Describes how the form should be rendered, see rjsf
    """

    # Both sub-schemas are optional; UNSET marks "absent".
    form: Union[Unset, "FormSchemaForm"] = UNSET
    ui: Union[Unset, "FormSchemaUi"] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        # Serialize nested schemas only when present; UNSET fields are omitted.
        form: Union[Unset, Dict[str, Any]] = UNSET
        if not isinstance(self.form, Unset):
            form = self.form.to_dict()

        ui: Union[Unset, Dict[str, Any]] = UNSET
        if not isinstance(self.ui, Unset):
            ui = self.ui.to_dict()

        field_dict: Dict[str, Any] = {}
        # Extras go in first so declared fields win on key collisions.
        field_dict.update(self.additional_properties)
        field_dict.update({})
        if form is not UNSET:
            field_dict["form"] = form
        if ui is not UNSET:
            field_dict["ui"] = ui

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        # Deferred imports — presumably to avoid circular imports between models.
        from ..models.form_schema_form import FormSchemaForm
        from ..models.form_schema_ui import FormSchemaUi

        d = src_dict.copy()
        # Tri-state handling: absent keys stay UNSET, present values are parsed.
        _form = d.pop("form", UNSET)
        form: Union[Unset, FormSchemaForm]
        if isinstance(_form, Unset):
            form = UNSET
        else:
            form = FormSchemaForm.from_dict(_form)

        _ui = d.pop("ui", UNSET)
        ui: Union[Unset, FormSchemaUi]
        if isinstance(_ui, Unset):
            ui = UNSET
        else:
            ui = FormSchemaUi.from_dict(_ui)

        form_schema = cls(
            form=form,
            ui=ui,
        )

        # Keys not consumed above are preserved as additional properties.
        form_schema.additional_properties = d
        return form_schema

    @property
    def additional_keys(self) -> List[str]:
        # Names of properties beyond the declared schema fields.
        return list(self.additional_properties.keys())
Attributes:
- form (Union[Unset, FormSchemaForm]): JSONSchema representation of the parameters
- ui (Union[Unset, FormSchemaUi]): Describes how the form should be rendered, see rjsf
25def __init__(self, form=attr_dict['form'].default, ui=attr_dict['ui'].default): 26 self.form = form 27 self.ui = ui 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FormSchema.
29 def to_dict(self) -> Dict[str, Any]: 30 form: Union[Unset, Dict[str, Any]] = UNSET 31 if not isinstance(self.form, Unset): 32 form = self.form.to_dict() 33 34 ui: Union[Unset, Dict[str, Any]] = UNSET 35 if not isinstance(self.ui, Unset): 36 ui = self.ui.to_dict() 37 38 field_dict: Dict[str, Any] = {} 39 field_dict.update(self.additional_properties) 40 field_dict.update({}) 41 if form is not UNSET: 42 field_dict["form"] = form 43 if ui is not UNSET: 44 field_dict["ui"] = ui 45 46 return field_dict
48 @classmethod 49 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 50 from ..models.form_schema_form import FormSchemaForm 51 from ..models.form_schema_ui import FormSchemaUi 52 53 d = src_dict.copy() 54 _form = d.pop("form", UNSET) 55 form: Union[Unset, FormSchemaForm] 56 if isinstance(_form, Unset): 57 form = UNSET 58 else: 59 form = FormSchemaForm.from_dict(_form) 60 61 _ui = d.pop("ui", UNSET) 62 ui: Union[Unset, FormSchemaUi] 63 if isinstance(_ui, Unset): 64 ui = UNSET 65 else: 66 ui = FormSchemaUi.from_dict(_ui) 67 68 form_schema = cls( 69 form=form, 70 ui=ui, 71 ) 72 73 form_schema.additional_properties = d 74 return form_schema
@_attrs_define
class FormSchemaForm:
    """JSONSchema representation of the parameters"""

    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        # There are no declared fields; every stored property is emitted verbatim.
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        form_schema_form = cls()
        form_schema_form.additional_properties = dict(src_dict)
        return form_schema_form

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties)
JSONSchema representation of the parameters
@_attrs_define
class FormSchemaUi:
    """Describes how the form should be rendered, see rjsf"""

    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        # There are no declared fields; every stored property is emitted verbatim.
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        form_schema_ui = cls()
        form_schema_ui.additional_properties = dict(src_dict)
        return form_schema_ui

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties)
Describes how the form should be rendered, see rjsf
@_attrs_define
class FulfillmentResponse:
    """
    Attributes:
        fulfillment_id (str):
        path (str):
    """

    fulfillment_id: str
    path: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        # Extras are copied first so the declared fields win on key collisions.
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized["fulfillmentId"] = self.fulfillment_id
        serialized["path"] = self.path
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        remaining = dict(src_dict)
        fulfillment_response = cls(
            fulfillment_id=remaining.pop("fulfillmentId"),
            path=remaining.pop("path"),
        )
        # Keys not consumed above are preserved as additional properties.
        fulfillment_response.additional_properties = remaining
        return fulfillment_response

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties)
Attributes:
- fulfillment_id (str):
- path (str):
25def __init__(self, fulfillment_id, path): 26 self.fulfillment_id = fulfillment_id 27 self.path = path 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class FulfillmentResponse.
22 def to_dict(self) -> Dict[str, Any]: 23 fulfillment_id = self.fulfillment_id 24 25 path = self.path 26 27 field_dict: Dict[str, Any] = {} 28 field_dict.update(self.additional_properties) 29 field_dict.update( 30 { 31 "fulfillmentId": fulfillment_id, 32 "path": path, 33 } 34 ) 35 36 return field_dict
38 @classmethod 39 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 40 d = src_dict.copy() 41 fulfillment_id = d.pop("fulfillmentId") 42 43 path = d.pop("path") 44 45 fulfillment_response = cls( 46 fulfillment_id=fulfillment_id, 47 path=path, 48 ) 49 50 fulfillment_response.additional_properties = d 51 return fulfillment_response
@_attrs_define
class GenerateSftpCredentialsRequest:
    """
    Attributes:
        lifetime_days (Union[Unset, int]): Number of days the credentials are valid for Default: 1.
    """

    lifetime_days: Union[Unset, int] = 1
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        # Extras are copied first; the declared field is emitted only when not UNSET.
        serialized: Dict[str, Any] = dict(self.additional_properties)
        if self.lifetime_days is not UNSET:
            serialized["lifetimeDays"] = self.lifetime_days
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        remaining = dict(src_dict)
        generate_sftp_credentials_request = cls(
            lifetime_days=remaining.pop("lifetimeDays", UNSET),
        )
        # Keys not consumed above are preserved as additional properties.
        generate_sftp_credentials_request.additional_properties = remaining
        return generate_sftp_credentials_request

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties)
Attributes:
- lifetime_days (Union[Unset, int]): Number of days the credentials are valid for. Default: 1.
24def __init__(self, lifetime_days=attr_dict['lifetime_days'].default): 25 self.lifetime_days = lifetime_days 26 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GenerateSftpCredentialsRequest.
33 @classmethod 34 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 35 d = src_dict.copy() 36 lifetime_days = d.pop("lifetimeDays", UNSET) 37 38 generate_sftp_credentials_request = cls( 39 lifetime_days=lifetime_days, 40 ) 41 42 generate_sftp_credentials_request.additional_properties = d 43 return generate_sftp_credentials_request
@_attrs_define
class GetExecutionLogsResponse:
    """
    Attributes:
        events (List['LogEntry']):
    """

    events: List["LogEntry"]
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        # Extras are copied first; each log entry is flattened to a plain dict.
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized["events"] = [entry.to_dict() for entry in self.events]
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        # Deferred import — presumably to avoid circular imports between models.
        from ..models.log_entry import LogEntry

        remaining = dict(src_dict)
        parsed_events = [LogEntry.from_dict(item) for item in remaining.pop("events")]

        get_execution_logs_response = cls(
            events=parsed_events,
        )
        # Keys not consumed above are preserved as additional properties.
        get_execution_logs_response.additional_properties = remaining
        return get_execution_logs_response

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties)
Attributes:
- events (List['LogEntry']):
24def __init__(self, events): 25 self.events = events 26 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GetExecutionLogsResponse.
24 def to_dict(self) -> Dict[str, Any]: 25 events = [] 26 for events_item_data in self.events: 27 events_item = events_item_data.to_dict() 28 events.append(events_item) 29 30 field_dict: Dict[str, Any] = {} 31 field_dict.update(self.additional_properties) 32 field_dict.update( 33 { 34 "events": events, 35 } 36 ) 37 38 return field_dict
40 @classmethod 41 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 42 from ..models.log_entry import LogEntry 43 44 d = src_dict.copy() 45 events = [] 46 _events = d.pop("events") 47 for events_item_data in _events: 48 events_item = LogEntry.from_dict(events_item_data) 49 50 events.append(events_item) 51 52 get_execution_logs_response = cls( 53 events=events, 54 ) 55 56 get_execution_logs_response.additional_properties = d 57 return get_execution_logs_response
@_attrs_define
class GetProjectSummaryResponse200:
    """Free-form response model whose every property is a list of ``Task`` objects."""

    # No declared fields: the entire payload is a mapping of names to Task lists.
    additional_properties: Dict[str, List["Task"]] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        # Flatten each Task in each property's list to a plain dict.
        field_dict: Dict[str, Any] = {}
        for prop_name, prop in self.additional_properties.items():
            field_dict[prop_name] = []
            for additional_property_item_data in prop:
                additional_property_item = additional_property_item_data.to_dict()
                field_dict[prop_name].append(additional_property_item)

        field_dict.update({})

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        # Deferred import — presumably to avoid circular imports between models.
        from ..models.task import Task

        d = src_dict.copy()
        get_project_summary_response_200 = cls()

        # Every key in the payload is parsed as a list of Task models.
        additional_properties = {}
        for prop_name, prop_dict in d.items():
            additional_property = []
            _additional_property = prop_dict
            for additional_property_item_data in _additional_property:
                additional_property_item = Task.from_dict(additional_property_item_data)

                additional_property.append(additional_property_item)

            additional_properties[prop_name] = additional_property

        get_project_summary_response_200.additional_properties = additional_properties
        return get_project_summary_response_200

    @property
    def additional_keys(self) -> List[str]:
        # Names of all properties in the response.
        return list(self.additional_properties.keys())
def to_dict(self) -> Dict[str, Any]:
    """Convert the stored mapping of task lists into plain dicts."""
    output: Dict[str, Any] = {}
    for key, task_list in self.additional_properties.items():
        output[key] = [entry.to_dict() for entry in task_list]
    return output
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize *src_dict*, parsing every value as a list of tasks."""
    from ..models.task import Task

    model = cls()
    model.additional_properties = {
        name: [Task.from_dict(raw) for raw in raw_items]
        for name, raw_items in src_dict.items()
    }
    return model
class GovernanceAccessType(str, Enum):
    """The kinds of governance file access that can be requested."""

    FULFILLMENT_DOWNLOAD = "FULFILLMENT_DOWNLOAD"
    FULFILLMENT_UPLOAD = "FULFILLMENT_UPLOAD"
    GOVERNANCE_DOWNLOAD = "GOVERNANCE_DOWNLOAD"
    GOVERNANCE_UPLOAD = "GOVERNANCE_UPLOAD"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Unrecognized server values degrade to UNKNOWN instead of raising
        # ValueError. Return the member directly; the original wrapped it in
        # cls(...), which re-entered the enum constructor for nothing.
        return cls.UNKNOWN
str(object='') -> str
str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known; do not use this value when making requests
@_attrs_define
class GovernanceClassification:
    """A data classification defined for governance purposes.

    Attributes:
        id (str):
        name (str):
        description (str):
        requirement_ids (List[str]):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
    """

    id: str
    name: str
    description: str
    requirement_ids: List[str]
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict using the API's camelCase keys."""
        payload: Dict[str, Any] = dict(self.additional_properties)
        payload.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "requirementIds": self.requirement_ids,
                "createdBy": self.created_by,
                "createdAt": self.created_at.isoformat(),
                "updatedAt": self.updated_at.isoformat(),
            }
        )
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Parse an API payload; unknown keys are kept as additional_properties."""
        data = src_dict.copy()
        classification = cls(
            id=data.pop("id"),
            name=data.pop("name"),
            description=data.pop("description"),
            requirement_ids=cast(List[str], data.pop("requirementIds")),
            created_by=data.pop("createdBy"),
            created_at=isoparse(data.pop("createdAt")),
            updated_at=isoparse(data.pop("updatedAt")),
        )
        classification.additional_properties = data
        return classification

    @property
    def additional_keys(self) -> List[str]:
        """Payload keys beyond the declared attributes."""
        return list(self.additional_properties.keys())
Attributes:
- id (str):
- name (str):
- description (str):
- requirement_ids (List[str]):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
def __init__(self, id, name, description, requirement_ids, created_by, created_at, updated_at):
    """Method generated by attrs for class GovernanceClassification."""
    self.id = id
    self.name = name
    self.description = description
    self.requirement_ids = requirement_ids
    self.created_by = created_by
    self.created_at = created_at
    self.updated_at = updated_at
    # init=False attribute: always re-created from its factory (an empty dict).
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GovernanceClassification.
def to_dict(self) -> Dict[str, Any]:
    """Render this classification as a camelCase JSON dict."""
    result: Dict[str, Any] = dict(self.additional_properties)
    result["id"] = self.id
    result["name"] = self.name
    result["description"] = self.description
    result["requirementIds"] = self.requirement_ids
    result["createdBy"] = self.created_by
    result["createdAt"] = self.created_at.isoformat()
    result["updatedAt"] = self.updated_at.isoformat()
    return result
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Build a classification from an API payload, keeping extra keys."""
    data = src_dict.copy()
    instance = cls(
        id=data.pop("id"),
        name=data.pop("name"),
        description=data.pop("description"),
        requirement_ids=cast(List[str], data.pop("requirementIds")),
        created_by=data.pop("createdBy"),
        created_at=isoparse(data.pop("createdAt")),
        updated_at=isoparse(data.pop("updatedAt")),
    )
    instance.additional_properties = data
    return instance
@_attrs_define
class GovernanceContact:
    """A point of contact for governance requirements.

    Attributes:
        id (str):
        title (str):
        description (str):
        name (str):
        phone (str):
        email (str):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
    """

    id: str
    title: str
    description: str
    name: str
    phone: str
    email: str
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to camelCase JSON keys; extra properties are written first."""
        payload: Dict[str, Any] = dict(self.additional_properties)
        payload.update(
            {
                "id": self.id,
                "title": self.title,
                "description": self.description,
                "name": self.name,
                "phone": self.phone,
                "email": self.email,
                "createdBy": self.created_by,
                "createdAt": self.created_at.isoformat(),
                "updatedAt": self.updated_at.isoformat(),
            }
        )
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build a contact from an API payload, keeping unknown keys."""
        data = src_dict.copy()
        contact = cls(
            id=data.pop("id"),
            title=data.pop("title"),
            description=data.pop("description"),
            name=data.pop("name"),
            phone=data.pop("phone"),
            email=data.pop("email"),
            created_by=data.pop("createdBy"),
            created_at=isoparse(data.pop("createdAt")),
            updated_at=isoparse(data.pop("updatedAt")),
        )
        contact.additional_properties = data
        return contact

    @property
    def additional_keys(self) -> List[str]:
        """Payload keys beyond the declared attributes."""
        return list(self.additional_properties.keys())
Attributes:
- id (str):
- title (str):
- description (str):
- name (str):
- phone (str):
- email (str):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
def __init__(self, id, title, description, name, phone, email, created_by, created_at, updated_at):
    """Method generated by attrs for class GovernanceContact."""
    self.id = id
    self.title = title
    self.description = description
    self.name = name
    self.phone = phone
    self.email = email
    self.created_by = created_by
    self.created_at = created_at
    self.updated_at = updated_at
    # init=False attribute: always re-created from its factory (an empty dict).
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GovernanceContact.
def to_dict(self) -> Dict[str, Any]:
    """Render this contact as a camelCase JSON dict."""
    serialized: Dict[str, Any] = dict(self.additional_properties)
    serialized.update(
        {
            "id": self.id,
            "title": self.title,
            "description": self.description,
            "name": self.name,
            "phone": self.phone,
            "email": self.email,
            "createdBy": self.created_by,
            "createdAt": self.created_at.isoformat(),
            "updatedAt": self.updated_at.isoformat(),
        }
    )
    return serialized
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize a contact payload; leftover keys become additional_properties."""
    data = src_dict.copy()
    contact = cls(
        id=data.pop("id"),
        title=data.pop("title"),
        description=data.pop("description"),
        name=data.pop("name"),
        phone=data.pop("phone"),
        email=data.pop("email"),
        created_by=data.pop("createdBy"),
        created_at=isoparse(data.pop("createdAt")),
        updated_at=isoparse(data.pop("updatedAt")),
    )
    contact.additional_properties = data
    return contact
@_attrs_define
class GovernanceExpiry:
    """Expiry settings for a governance requirement.

    Attributes:
        type (Union[Unset, GovernanceExpiryType]): The expiry conditions that can be applied to governance requirements.
        days (Union[None, Unset, int]): The number of days for a relative expiration
        date (Union[None, Unset, datetime.datetime]): The date for an absolute expiration
    """

    type: Union[Unset, GovernanceExpiryType] = UNSET
    days: Union[None, Unset, int] = UNSET
    date: Union[None, Unset, datetime.datetime] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the expiry, omitting any field that is still UNSET."""
        payload: Dict[str, Any] = dict(self.additional_properties)
        if not isinstance(self.type, Unset):
            payload["type"] = self.type.value
        if not isinstance(self.days, Unset):
            payload["days"] = self.days
        if not isinstance(self.date, Unset):
            # Datetimes become ISO-8601 strings; None passes through as-is.
            payload["date"] = self.date.isoformat() if isinstance(self.date, datetime.datetime) else self.date
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Parse the payload; a missing "type" stays UNSET, unparseable dates pass through."""
        data = src_dict.copy()

        raw_type = data.pop("type", UNSET)
        parsed_type: Union[Unset, GovernanceExpiryType] = (
            UNSET if isinstance(raw_type, Unset) else GovernanceExpiryType(raw_type)
        )

        def _coerce_days(value: object) -> Union[None, Unset, int]:
            if value is None or isinstance(value, Unset):
                return value
            return cast(Union[None, Unset, int], value)

        def _coerce_date(value: object) -> Union[None, Unset, datetime.datetime]:
            if value is None or isinstance(value, Unset):
                return value
            try:
                if not isinstance(value, str):
                    raise TypeError()
                return isoparse(value)
            except:  # noqa: E722
                # Unparseable values are passed through untouched rather than failing.
                pass
            return cast(Union[None, Unset, datetime.datetime], value)

        expiry = cls(
            type=parsed_type,
            days=_coerce_days(data.pop("days", UNSET)),
            date=_coerce_date(data.pop("date", UNSET)),
        )
        expiry.additional_properties = data
        return expiry

    @property
    def additional_keys(self) -> List[str]:
        """Extra payload keys not covered by declared attributes."""
        return list(self.additional_properties.keys())
Attributes:
- type (Union[Unset, GovernanceExpiryType]): The expiry conditions that can be applied to governance requirements.
- days (Union[None, Unset, int]): The number of days for a relative expiration
- date (Union[None, Unset, datetime.datetime]): The date for an absolute expiration
def __init__(self, type=attr_dict['type'].default, days=attr_dict['days'].default, date=attr_dict['date'].default):
    """Method generated by attrs for class GovernanceExpiry."""
    self.type = type
    self.days = days
    self.date = date
    # init=False attribute: always re-created from its factory (an empty dict).
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GovernanceExpiry.
def to_dict(self) -> Dict[str, Any]:
    """Serialize the expiry, omitting fields that are UNSET."""
    output: Dict[str, Any] = dict(self.additional_properties)
    if not isinstance(self.type, Unset):
        output["type"] = self.type.value
    if not isinstance(self.days, Unset):
        output["days"] = self.days
    if not isinstance(self.date, Unset):
        # Datetimes serialize to ISO-8601; a plain None passes through.
        output["date"] = self.date.isoformat() if isinstance(self.date, datetime.datetime) else self.date
    return output
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Parse an expiry payload; bad date strings pass through unparsed."""
    data = src_dict.copy()

    raw_type = data.pop("type", UNSET)
    parsed_type: Union[Unset, GovernanceExpiryType] = (
        UNSET if isinstance(raw_type, Unset) else GovernanceExpiryType(raw_type)
    )

    def _coerce_days(value: object) -> Union[None, Unset, int]:
        if value is None or isinstance(value, Unset):
            return value
        return cast(Union[None, Unset, int], value)

    def _coerce_date(value: object) -> Union[None, Unset, datetime.datetime]:
        if value is None or isinstance(value, Unset):
            return value
        try:
            if not isinstance(value, str):
                raise TypeError()
            return isoparse(value)
        except:  # noqa: E722
            pass
        return cast(Union[None, Unset, datetime.datetime], value)

    expiry = cls(
        type=parsed_type,
        days=_coerce_days(data.pop("days", UNSET)),
        date=_coerce_date(data.pop("date", UNSET)),
    )
    expiry.additional_properties = data
    return expiry
class GovernanceExpiryType(str, Enum):
    """How a governance requirement's expiry is anchored (absolute date, relative offset, or none)."""

    ABSOLUTE = "ABSOLUTE"
    NONE = "NONE"
    RELATIVE_COMPLETION = "RELATIVE_COMPLETION"
    RELATIVE_ENACTMENT = "RELATIVE_ENACTMENT"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Unrecognized server values degrade to UNKNOWN instead of raising
        # ValueError. Return the member directly; the original wrapped it in
        # cls(...), which re-entered the enum constructor for nothing.
        return cls.UNKNOWN
str(object='') -> str
str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known; do not use this value when making requests
@_attrs_define
class GovernanceFile:
    """A supplemental file or link attached to a governance requirement.

    Attributes:
        name (Union[Unset, str]): The title of the resource visible to users
        description (Union[Unset, str]): A description of the resource visible to users
        src (Union[Unset, str]): The file name without path or the full link path
        type (Union[Unset, GovernanceFileType]): The options for supplementals for governance requirements
    """

    name: Union[Unset, str] = UNSET
    description: Union[Unset, str] = UNSET
    src: Union[Unset, str] = UNSET
    type: Union[Unset, GovernanceFileType] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize, dropping any field left UNSET."""
        payload: Dict[str, Any] = dict(self.additional_properties)
        if not isinstance(self.name, Unset):
            payload["name"] = self.name
        if not isinstance(self.description, Unset):
            payload["description"] = self.description
        if not isinstance(self.src, Unset):
            payload["src"] = self.src
        if not isinstance(self.type, Unset):
            payload["type"] = self.type.value
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Parse a file payload; absent keys stay UNSET, extras are kept."""
        data = src_dict.copy()
        raw_type = data.pop("type", UNSET)
        file_type: Union[Unset, GovernanceFileType] = (
            UNSET if isinstance(raw_type, Unset) else GovernanceFileType(raw_type)
        )
        model = cls(
            name=data.pop("name", UNSET),
            description=data.pop("description", UNSET),
            src=data.pop("src", UNSET),
            type=file_type,
        )
        model.additional_properties = data
        return model

    @property
    def additional_keys(self) -> List[str]:
        """Extra payload keys not declared on the model."""
        return list(self.additional_properties.keys())
Attributes:
- name (Union[Unset, str]): The title of the resource visible to users
- description (Union[Unset, str]): A description of the resource visible to users
- src (Union[Unset, str]): The file name without path or the full link path
- type (Union[Unset, GovernanceFileType]): The options for supplementals for governance requirements
def __init__(self, name=attr_dict['name'].default, description=attr_dict['description'].default, src=attr_dict['src'].default, type=attr_dict['type'].default):
    """Method generated by attrs for class GovernanceFile."""
    self.name = name
    self.description = description
    self.src = src
    self.type = type
    # init=False attribute: always re-created from its factory (an empty dict).
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GovernanceFile.
def to_dict(self) -> Dict[str, Any]:
    """Serialize the file resource, skipping fields left UNSET."""
    output: Dict[str, Any] = dict(self.additional_properties)
    for key, value in (("name", self.name), ("description", self.description), ("src", self.src)):
        if not isinstance(value, Unset):
            output[key] = value
    if not isinstance(self.type, Unset):
        output["type"] = self.type.value
    return output
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Parse a file payload; missing keys stay UNSET, extras are kept."""
    data = src_dict.copy()
    raw_type = data.pop("type", UNSET)
    file_type: Union[Unset, GovernanceFileType] = (
        UNSET if isinstance(raw_type, Unset) else GovernanceFileType(raw_type)
    )
    model = cls(
        name=data.pop("name", UNSET),
        description=data.pop("description", UNSET),
        src=data.pop("src", UNSET),
        type=file_type,
    )
    model.additional_properties = data
    return model
@_attrs_define
class GovernanceFileAccessRequest:
    """Request body for obtaining access to governance files.

    Attributes:
        access_type (GovernanceAccessType):
        fulfillment_id (Union[None, Unset, str]):
        project_id (Union[None, Unset, str]):
        token_lifetime_hours (Union[None, Unset, int]):
    """

    access_type: GovernanceAccessType
    fulfillment_id: Union[None, Unset, str] = UNSET
    project_id: Union[None, Unset, str] = UNSET
    token_lifetime_hours: Union[None, Unset, int] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize; accessType is always emitted, optionals only when not UNSET."""
        payload: Dict[str, Any] = dict(self.additional_properties)
        payload["accessType"] = self.access_type.value
        if not isinstance(self.fulfillment_id, Unset):
            payload["fulfillmentId"] = self.fulfillment_id
        if not isinstance(self.project_id, Unset):
            payload["projectId"] = self.project_id
        if not isinstance(self.token_lifetime_hours, Unset):
            payload["tokenLifetimeHours"] = self.token_lifetime_hours
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Parse the payload; nullable optionals pass None/UNSET through unchanged."""
        data = src_dict.copy()
        request = cls(
            access_type=GovernanceAccessType(data.pop("accessType")),
            fulfillment_id=cast(Union[None, Unset, str], data.pop("fulfillmentId", UNSET)),
            project_id=cast(Union[None, Unset, str], data.pop("projectId", UNSET)),
            token_lifetime_hours=cast(Union[None, Unset, int], data.pop("tokenLifetimeHours", UNSET)),
        )
        request.additional_properties = data
        return request

    @property
    def additional_keys(self) -> List[str]:
        """Extra payload keys not declared on the model."""
        return list(self.additional_properties.keys())
Attributes:
- access_type (GovernanceAccessType):
- fulfillment_id (Union[None, Unset, str]):
- project_id (Union[None, Unset, str]):
- token_lifetime_hours (Union[None, Unset, int]):
def __init__(self, access_type, fulfillment_id=attr_dict['fulfillment_id'].default, project_id=attr_dict['project_id'].default, token_lifetime_hours=attr_dict['token_lifetime_hours'].default):
    """Method generated by attrs for class GovernanceFileAccessRequest."""
    self.access_type = access_type
    self.fulfillment_id = fulfillment_id
    self.project_id = project_id
    self.token_lifetime_hours = token_lifetime_hours
    # init=False attribute: always re-created from its factory (an empty dict).
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GovernanceFileAccessRequest.
def to_dict(self) -> Dict[str, Any]:
    """Serialize; accessType is mandatory, optionals appear only when set."""
    output: Dict[str, Any] = dict(self.additional_properties)
    output["accessType"] = self.access_type.value
    optional_fields = (
        ("fulfillmentId", self.fulfillment_id),
        ("projectId", self.project_id),
        ("tokenLifetimeHours", self.token_lifetime_hours),
    )
    for key, value in optional_fields:
        if not isinstance(value, Unset):
            output[key] = value
    return output
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Parse an access request; None/UNSET optionals are passed through unchanged."""
    data = src_dict.copy()
    request = cls(
        access_type=GovernanceAccessType(data.pop("accessType")),
        fulfillment_id=cast(Union[None, Unset, str], data.pop("fulfillmentId", UNSET)),
        project_id=cast(Union[None, Unset, str], data.pop("projectId", UNSET)),
        token_lifetime_hours=cast(Union[None, Unset, int], data.pop("tokenLifetimeHours", UNSET)),
    )
    request.additional_properties = data
    return request
@_attrs_define
class GovernanceFileInput:
    """Input payload describing a supplemental governance file.

    Attributes:
        name (str):
        description (str):
        src (str):
        type (GovernanceFileType): The options for supplementals for governance requirements
    """

    name: str
    description: str
    src: str
    type: GovernanceFileType
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize all four required fields after any extra properties."""
        payload: Dict[str, Any] = dict(self.additional_properties)
        payload.update(
            {
                "name": self.name,
                "description": self.description,
                "src": self.src,
                "type": self.type.value,
            }
        )
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an input model from the payload, keeping unknown keys."""
        data = src_dict.copy()
        model = cls(
            name=data.pop("name"),
            description=data.pop("description"),
            src=data.pop("src"),
            type=GovernanceFileType(data.pop("type")),
        )
        model.additional_properties = data
        return model

    @property
    def additional_keys(self) -> List[str]:
        """Extra payload keys not declared on the model."""
        return list(self.additional_properties.keys())
Attributes:
- name (str):
- description (str):
- src (str):
- type (GovernanceFileType): The options for supplementals for governance requirements
def __init__(self, name, description, src, type):
    """Method generated by attrs for class GovernanceFileInput."""
    self.name = name
    self.description = description
    self.src = src
    self.type = type
    # init=False attribute: always re-created from its factory (an empty dict).
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GovernanceFileInput.
def to_dict(self) -> Dict[str, Any]:
    """Render this file input as a plain dict; known keys override extras."""
    result: Dict[str, Any] = dict(self.additional_properties)
    result["name"] = self.name
    result["description"] = self.description
    result["src"] = self.src
    result["type"] = self.type.value
    return result
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize a file-input payload; leftovers land in additional_properties."""
    data = src_dict.copy()
    instance = cls(
        name=data.pop("name"),
        description=data.pop("description"),
        src=data.pop("src"),
        type=GovernanceFileType(data.pop("type")),
    )
    instance.additional_properties = data
    return instance
class GovernanceFileType(str, Enum):
    """The options for supplementals for governance requirements (uploaded file or external link)."""

    FILE = "FILE"
    LINK = "LINK"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Unrecognized server values degrade to UNKNOWN instead of raising
        # ValueError. Return the member directly; the original wrapped it in
        # cls(...), which re-entered the enum constructor for nothing.
        return cls.UNKNOWN
str(object='') -> str
str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known; do not use this value when making requests
@_attrs_define
class GovernanceRequirement:
    """A governance requirement enforced on projects or the whole tenant.

    Serialized to/from the API's camelCase JSON via to_dict()/from_dict().
    Optional fields use the Unset sentinel to distinguish "absent from the
    payload" from an explicit JSON null (None).

    Attributes:
        id (str): The unique identifier for the requirement
        name (str): The name of the requirement
        description (str): A brief description of the requirement
        type (GovernanceType): The types of governance requirements that can be enforced
        path (str): S3 prefix where files for the requirement are saved
        scope (GovernanceScope): The levels at which governance requirements can be enforced
        contact_ids (List[str]): The IDs of governance contacts assigned to the requirement.
        expiration (GovernanceExpiry):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
        project_id (Union[Unset, str]): The project ID if the requirement is project scope
        acceptance (Union[GovernanceScope, None, Unset]): Specifies the level at which it is satisfied
        enactment_date (Union[None, Unset, datetime.datetime]): The date of enactment for a requirement
        supplemental_docs (Union[List['GovernanceFile'], None, Unset]): Optional files with extra information, e.g.
            templates for documents, links, etc
        file (Union['GovernanceFile', None, Unset]):
        authorship (Union[GovernanceScope, None, Unset]): Who needs to supply the agreement document
        project_file_map (Union['GovernanceRequirementProjectFileMap', None, Unset]): Files supplied by each project
            when authorship is project
        verification_method (Union[GovernanceTrainingVerification, None, Unset]): The value indicating how the
            completion of the training is verified.
    """

    # Required fields (always present in the wire payload).
    id: str
    name: str
    description: str
    type: GovernanceType
    path: str
    scope: GovernanceScope
    contact_ids: List[str]
    expiration: "GovernanceExpiry"
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    # Optional fields: UNSET means "omit from the serialized payload".
    project_id: Union[Unset, str] = UNSET
    acceptance: Union[GovernanceScope, None, Unset] = UNSET
    enactment_date: Union[None, Unset, datetime.datetime] = UNSET
    supplemental_docs: Union[List["GovernanceFile"], None, Unset] = UNSET
    file: Union["GovernanceFile", None, Unset] = UNSET
    authorship: Union[GovernanceScope, None, Unset] = UNSET
    project_file_map: Union["GovernanceRequirementProjectFileMap", None, Unset] = UNSET
    verification_method: Union[GovernanceTrainingVerification, None, Unset] = UNSET
    # Unknown keys from the API are preserved here (round-tripped by to_dict/from_dict).
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to the camelCase wire format, skipping UNSET optional fields."""
        # Imported lazily — presumably to avoid circular imports between
        # generated model modules (TODO confirm against the generator).
        from ..models.governance_file import GovernanceFile
        from ..models.governance_requirement_project_file_map import GovernanceRequirementProjectFileMap

        id = self.id

        name = self.name

        description = self.description

        # Enums serialize to their raw string values.
        type = self.type.value

        path = self.path

        scope = self.scope.value

        contact_ids = self.contact_ids

        # Nested models serialize recursively.
        expiration = self.expiration.to_dict()

        created_by = self.created_by

        # Datetimes serialize as ISO-8601 strings.
        created_at = self.created_at.isoformat()

        updated_at = self.updated_at.isoformat()

        project_id = self.project_id

        # For each optional union field: Unset passes through unchanged (so it can
        # be omitted below), a known model/enum value is serialized, and anything
        # else (including None) is passed through as-is.
        acceptance: Union[None, Unset, str]
        if isinstance(self.acceptance, Unset):
            acceptance = UNSET
        elif isinstance(self.acceptance, GovernanceScope):
            acceptance = self.acceptance.value
        else:
            acceptance = self.acceptance

        enactment_date: Union[None, Unset, str]
        if isinstance(self.enactment_date, Unset):
            enactment_date = UNSET
        elif isinstance(self.enactment_date, datetime.datetime):
            enactment_date = self.enactment_date.isoformat()
        else:
            enactment_date = self.enactment_date

        supplemental_docs: Union[List[Dict[str, Any]], None, Unset]
        if isinstance(self.supplemental_docs, Unset):
            supplemental_docs = UNSET
        elif isinstance(self.supplemental_docs, list):
            supplemental_docs = []
            for supplemental_docs_type_0_item_data in self.supplemental_docs:
                supplemental_docs_type_0_item = supplemental_docs_type_0_item_data.to_dict()
                supplemental_docs.append(supplemental_docs_type_0_item)

        else:
            supplemental_docs = self.supplemental_docs

        file: Union[Dict[str, Any], None, Unset]
        if isinstance(self.file, Unset):
            file = UNSET
        elif isinstance(self.file, GovernanceFile):
            file = self.file.to_dict()
        else:
            file = self.file

        authorship: Union[None, Unset, str]
        if isinstance(self.authorship, Unset):
            authorship = UNSET
        elif isinstance(self.authorship, GovernanceScope):
            authorship = self.authorship.value
        else:
            authorship = self.authorship

        project_file_map: Union[Dict[str, Any], None, Unset]
        if isinstance(self.project_file_map, Unset):
            project_file_map = UNSET
        elif isinstance(self.project_file_map, GovernanceRequirementProjectFileMap):
            project_file_map = self.project_file_map.to_dict()
        else:
            project_file_map = self.project_file_map

        verification_method: Union[None, Unset, str]
        if isinstance(self.verification_method, Unset):
            verification_method = UNSET
        elif isinstance(self.verification_method, GovernanceTrainingVerification):
            verification_method = self.verification_method.value
        else:
            verification_method = self.verification_method

        # Declared fields are written after additional_properties, so they win
        # over any clashing extra keys.
        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
                "description": description,
                "type": type,
                "path": path,
                "scope": scope,
                "contactIds": contact_ids,
                "expiration": expiration,
                "createdBy": created_by,
                "createdAt": created_at,
                "updatedAt": updated_at,
            }
        )
        # Optional fields are emitted only when they were actually set.
        if project_id is not UNSET:
            field_dict["projectId"] = project_id
        if acceptance is not UNSET:
            field_dict["acceptance"] = acceptance
        if enactment_date is not UNSET:
            field_dict["enactmentDate"] = enactment_date
        if supplemental_docs is not UNSET:
            field_dict["supplementalDocs"] = supplemental_docs
        if file is not UNSET:
            field_dict["file"] = file
        if authorship is not UNSET:
            field_dict["authorship"] = authorship
        if project_file_map is not UNSET:
            field_dict["projectFileMap"] = project_file_map
        if verification_method is not UNSET:
            field_dict["verificationMethod"] = verification_method

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from the camelCase wire format.

        Keys not consumed here are kept as ``additional_properties``.
        Each optional union field uses a ``_parse_*`` helper that tries the
        typed interpretation and falls back to returning the raw value.
        """
        from ..models.governance_expiry import GovernanceExpiry
        from ..models.governance_file import GovernanceFile
        from ..models.governance_requirement_project_file_map import GovernanceRequirementProjectFileMap

        # Copy so that popping consumed keys does not mutate the caller's dict.
        d = src_dict.copy()
        id = d.pop("id")

        name = d.pop("name")

        description = d.pop("description")

        type = GovernanceType(d.pop("type"))

        path = d.pop("path")

        scope = GovernanceScope(d.pop("scope"))

        contact_ids = cast(List[str], d.pop("contactIds"))

        expiration = GovernanceExpiry.from_dict(d.pop("expiration"))

        created_by = d.pop("createdBy")

        created_at = isoparse(d.pop("createdAt"))

        updated_at = isoparse(d.pop("updatedAt"))

        project_id = d.pop("projectId", UNSET)

        def _parse_acceptance(data: object) -> Union[GovernanceScope, None, Unset]:
            # None / Unset pass straight through.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                acceptance_type_1 = GovernanceScope(data)

                return acceptance_type_1
            except:  # noqa: E722
                pass
            # Fallback: return the raw value unchanged.
            return cast(Union[GovernanceScope, None, Unset], data)

        acceptance = _parse_acceptance(d.pop("acceptance", UNSET))

        def _parse_enactment_date(data: object) -> Union[None, Unset, datetime.datetime]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                enactment_date_type_0 = isoparse(data)

                return enactment_date_type_0
            except:  # noqa: E722
                pass
            return cast(Union[None, Unset, datetime.datetime], data)

        enactment_date = _parse_enactment_date(d.pop("enactmentDate", UNSET))

        def _parse_supplemental_docs(data: object) -> Union[List["GovernanceFile"], None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                supplemental_docs_type_0 = []
                _supplemental_docs_type_0 = data
                for supplemental_docs_type_0_item_data in _supplemental_docs_type_0:
                    supplemental_docs_type_0_item = GovernanceFile.from_dict(supplemental_docs_type_0_item_data)

                    supplemental_docs_type_0.append(supplemental_docs_type_0_item)

                return supplemental_docs_type_0
            except:  # noqa: E722
                pass
            return cast(Union[List["GovernanceFile"], None, Unset], data)

        supplemental_docs = _parse_supplemental_docs(d.pop("supplementalDocs", UNSET))

        def _parse_file(data: object) -> Union["GovernanceFile", None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                file_type_1 = GovernanceFile.from_dict(data)

                return file_type_1
            except:  # noqa: E722
                pass
            return cast(Union["GovernanceFile", None, Unset], data)

        file = _parse_file(d.pop("file", UNSET))

        def _parse_authorship(data: object) -> Union[GovernanceScope, None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                authorship_type_1 = GovernanceScope(data)

                return authorship_type_1
            except:  # noqa: E722
                pass
            return cast(Union[GovernanceScope, None, Unset], data)

        authorship = _parse_authorship(d.pop("authorship", UNSET))

        def _parse_project_file_map(data: object) -> Union["GovernanceRequirementProjectFileMap", None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                project_file_map_type_0 = GovernanceRequirementProjectFileMap.from_dict(data)

                return project_file_map_type_0
            except:  # noqa: E722
                pass
            return cast(Union["GovernanceRequirementProjectFileMap", None, Unset], data)

        project_file_map = _parse_project_file_map(d.pop("projectFileMap", UNSET))

        def _parse_verification_method(data: object) -> Union[GovernanceTrainingVerification, None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                verification_method_type_1 = GovernanceTrainingVerification(data)

                return verification_method_type_1
            except:  # noqa: E722
                pass
            return cast(Union[GovernanceTrainingVerification, None, Unset], data)

        verification_method = _parse_verification_method(d.pop("verificationMethod", UNSET))

        governance_requirement = cls(
            id=id,
            name=name,
            description=description,
            type=type,
            path=path,
            scope=scope,
            contact_ids=contact_ids,
            expiration=expiration,
            created_by=created_by,
            created_at=created_at,
            updated_at=updated_at,
            project_id=project_id,
            acceptance=acceptance,
            enactment_date=enactment_date,
            supplemental_docs=supplemental_docs,
            file=file,
            authorship=authorship,
            project_file_map=project_file_map,
            verification_method=verification_method,
        )

        # Whatever keys remain become extra properties.
        governance_requirement.additional_properties = d
        return governance_requirement

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (non-declared) properties."""
        return list(self.additional_properties.keys())
Attributes:
- id (str): The unique identifier for the requirement
- name (str): The name of the requirement
- description (str): A brief description of the requirement
- type (GovernanceType): The types of governance requirements that can be enforced
- path (str): S3 prefix where files for the requirement are saved
- scope (GovernanceScope): The levels at which governance requirements can be enforced
- contact_ids (List[str]): The IDs of governance contacts assigned to the requirement.
- expiration (GovernanceExpiry):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
- project_id (Union[Unset, str]): The project ID if the requirement is project scope
- acceptance (Union[GovernanceScope, None, Unset]): Specifies the level at which it is satisfied
- enactment_date (Union[None, Unset, datetime.datetime]): The date of enactment for a requirement
- supplemental_docs (Union[List['GovernanceFile'], None, Unset]): Optional files with extra information, e.g., templates for documents, links, etc.
- file (Union['GovernanceFile', None, Unset]):
- authorship (Union[GovernanceScope, None, Unset]): Who needs to supply the agreement document
- project_file_map (Union['GovernanceRequirementProjectFileMap', None, Unset]): Files supplied by each project when authorship is project
- verification_method (Union[GovernanceTrainingVerification, None, Unset]): The value indicating how the completion of the training is verified.
def __init__(self, id, name, description, type, path, scope, contact_ids, expiration, created_by, created_at, updated_at, project_id=attr_dict['project_id'].default, acceptance=attr_dict['acceptance'].default, enactment_date=attr_dict['enactment_date'].default, supplemental_docs=attr_dict['supplemental_docs'].default, file=attr_dict['file'].default, authorship=attr_dict['authorship'].default, project_file_map=attr_dict['project_file_map'].default, verification_method=attr_dict['verification_method'].default):
    """Method generated by attrs for class GovernanceRequirement.

    NOTE(review): this is a rendering of attrs-generated code, not hand-written
    source. ``attr_dict`` and ``__attr_factory_additional_properties`` exist
    only inside the attrs code-generation namespace; do not call or edit this
    directly — change the attrs field declarations on the class instead.
    """
    self.id = id
    self.name = name
    self.description = description
    self.type = type
    self.path = path
    self.scope = scope
    self.contact_ids = contact_ids
    self.expiration = expiration
    self.created_by = created_by
    self.created_at = created_at
    self.updated_at = updated_at
    self.project_id = project_id
    self.acceptance = acceptance
    self.enactment_date = enactment_date
    self.supplemental_docs = supplemental_docs
    self.file = file
    self.authorship = authorship
    self.project_file_map = project_file_map
    # additional_properties is init=False: always created fresh via its factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class GovernanceRequirement.
def to_dict(self) -> Dict[str, Any]:
    """Serialize a GovernanceRequirement to the camelCase wire format.

    Enums become their string values, datetimes become ISO-8601 strings,
    nested models are serialized recursively, and optional fields that are
    UNSET are omitted from the result entirely.
    """
    # Lazy imports — presumably to avoid circular imports between generated
    # model modules (TODO confirm against the generator).
    from ..models.governance_file import GovernanceFile
    from ..models.governance_requirement_project_file_map import GovernanceRequirementProjectFileMap

    id = self.id

    name = self.name

    description = self.description

    type = self.type.value

    path = self.path

    scope = self.scope.value

    contact_ids = self.contact_ids

    expiration = self.expiration.to_dict()

    created_by = self.created_by

    created_at = self.created_at.isoformat()

    updated_at = self.updated_at.isoformat()

    project_id = self.project_id

    # Optional union fields: Unset passes through (omitted below), typed
    # values are serialized, anything else (incl. None) passes through raw.
    acceptance: Union[None, Unset, str]
    if isinstance(self.acceptance, Unset):
        acceptance = UNSET
    elif isinstance(self.acceptance, GovernanceScope):
        acceptance = self.acceptance.value
    else:
        acceptance = self.acceptance

    enactment_date: Union[None, Unset, str]
    if isinstance(self.enactment_date, Unset):
        enactment_date = UNSET
    elif isinstance(self.enactment_date, datetime.datetime):
        enactment_date = self.enactment_date.isoformat()
    else:
        enactment_date = self.enactment_date

    supplemental_docs: Union[List[Dict[str, Any]], None, Unset]
    if isinstance(self.supplemental_docs, Unset):
        supplemental_docs = UNSET
    elif isinstance(self.supplemental_docs, list):
        supplemental_docs = []
        for supplemental_docs_type_0_item_data in self.supplemental_docs:
            supplemental_docs_type_0_item = supplemental_docs_type_0_item_data.to_dict()
            supplemental_docs.append(supplemental_docs_type_0_item)

    else:
        supplemental_docs = self.supplemental_docs

    file: Union[Dict[str, Any], None, Unset]
    if isinstance(self.file, Unset):
        file = UNSET
    elif isinstance(self.file, GovernanceFile):
        file = self.file.to_dict()
    else:
        file = self.file

    authorship: Union[None, Unset, str]
    if isinstance(self.authorship, Unset):
        authorship = UNSET
    elif isinstance(self.authorship, GovernanceScope):
        authorship = self.authorship.value
    else:
        authorship = self.authorship

    project_file_map: Union[Dict[str, Any], None, Unset]
    if isinstance(self.project_file_map, Unset):
        project_file_map = UNSET
    elif isinstance(self.project_file_map, GovernanceRequirementProjectFileMap):
        project_file_map = self.project_file_map.to_dict()
    else:
        project_file_map = self.project_file_map

    verification_method: Union[None, Unset, str]
    if isinstance(self.verification_method, Unset):
        verification_method = UNSET
    elif isinstance(self.verification_method, GovernanceTrainingVerification):
        verification_method = self.verification_method.value
    else:
        verification_method = self.verification_method

    # Declared fields are written after additional_properties so they win
    # over any clashing extra keys.
    field_dict: Dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "id": id,
            "name": name,
            "description": description,
            "type": type,
            "path": path,
            "scope": scope,
            "contactIds": contact_ids,
            "expiration": expiration,
            "createdBy": created_by,
            "createdAt": created_at,
            "updatedAt": updated_at,
        }
    )
    # Optional fields are emitted only when set.
    if project_id is not UNSET:
        field_dict["projectId"] = project_id
    if acceptance is not UNSET:
        field_dict["acceptance"] = acceptance
    if enactment_date is not UNSET:
        field_dict["enactmentDate"] = enactment_date
    if supplemental_docs is not UNSET:
        field_dict["supplementalDocs"] = supplemental_docs
    if file is not UNSET:
        field_dict["file"] = file
    if authorship is not UNSET:
        field_dict["authorship"] = authorship
    if project_file_map is not UNSET:
        field_dict["projectFileMap"] = project_file_map
    if verification_method is not UNSET:
        field_dict["verificationMethod"] = verification_method

    return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize a GovernanceRequirement from the camelCase wire format.

    Keys not consumed here are kept as ``additional_properties``. Each
    optional union field uses a ``_parse_*`` helper that tries the typed
    interpretation and falls back to returning the raw value unchanged.
    """
    from ..models.governance_expiry import GovernanceExpiry
    from ..models.governance_file import GovernanceFile
    from ..models.governance_requirement_project_file_map import GovernanceRequirementProjectFileMap

    # Copy so that popping consumed keys does not mutate the caller's dict.
    d = src_dict.copy()
    id = d.pop("id")

    name = d.pop("name")

    description = d.pop("description")

    type = GovernanceType(d.pop("type"))

    path = d.pop("path")

    scope = GovernanceScope(d.pop("scope"))

    contact_ids = cast(List[str], d.pop("contactIds"))

    expiration = GovernanceExpiry.from_dict(d.pop("expiration"))

    created_by = d.pop("createdBy")

    created_at = isoparse(d.pop("createdAt"))

    updated_at = isoparse(d.pop("updatedAt"))

    project_id = d.pop("projectId", UNSET)

    def _parse_acceptance(data: object) -> Union[GovernanceScope, None, Unset]:
        # None / Unset pass straight through.
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            acceptance_type_1 = GovernanceScope(data)

            return acceptance_type_1
        except:  # noqa: E722
            pass
        # Fallback: return the raw value unchanged.
        return cast(Union[GovernanceScope, None, Unset], data)

    acceptance = _parse_acceptance(d.pop("acceptance", UNSET))

    def _parse_enactment_date(data: object) -> Union[None, Unset, datetime.datetime]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            enactment_date_type_0 = isoparse(data)

            return enactment_date_type_0
        except:  # noqa: E722
            pass
        return cast(Union[None, Unset, datetime.datetime], data)

    enactment_date = _parse_enactment_date(d.pop("enactmentDate", UNSET))

    def _parse_supplemental_docs(data: object) -> Union[List["GovernanceFile"], None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, list):
                raise TypeError()
            supplemental_docs_type_0 = []
            _supplemental_docs_type_0 = data
            for supplemental_docs_type_0_item_data in _supplemental_docs_type_0:
                supplemental_docs_type_0_item = GovernanceFile.from_dict(supplemental_docs_type_0_item_data)

                supplemental_docs_type_0.append(supplemental_docs_type_0_item)

            return supplemental_docs_type_0
        except:  # noqa: E722
            pass
        return cast(Union[List["GovernanceFile"], None, Unset], data)

    supplemental_docs = _parse_supplemental_docs(d.pop("supplementalDocs", UNSET))

    def _parse_file(data: object) -> Union["GovernanceFile", None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, dict):
                raise TypeError()
            file_type_1 = GovernanceFile.from_dict(data)

            return file_type_1
        except:  # noqa: E722
            pass
        return cast(Union["GovernanceFile", None, Unset], data)

    file = _parse_file(d.pop("file", UNSET))

    def _parse_authorship(data: object) -> Union[GovernanceScope, None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            authorship_type_1 = GovernanceScope(data)

            return authorship_type_1
        except:  # noqa: E722
            pass
        return cast(Union[GovernanceScope, None, Unset], data)

    authorship = _parse_authorship(d.pop("authorship", UNSET))

    def _parse_project_file_map(data: object) -> Union["GovernanceRequirementProjectFileMap", None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, dict):
                raise TypeError()
            project_file_map_type_0 = GovernanceRequirementProjectFileMap.from_dict(data)

            return project_file_map_type_0
        except:  # noqa: E722
            pass
        return cast(Union["GovernanceRequirementProjectFileMap", None, Unset], data)

    project_file_map = _parse_project_file_map(d.pop("projectFileMap", UNSET))

    def _parse_verification_method(data: object) -> Union[GovernanceTrainingVerification, None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            verification_method_type_1 = GovernanceTrainingVerification(data)

            return verification_method_type_1
        except:  # noqa: E722
            pass
        return cast(Union[GovernanceTrainingVerification, None, Unset], data)

    verification_method = _parse_verification_method(d.pop("verificationMethod", UNSET))

    governance_requirement = cls(
        id=id,
        name=name,
        description=description,
        type=type,
        path=path,
        scope=scope,
        contact_ids=contact_ids,
        expiration=expiration,
        created_by=created_by,
        created_at=created_at,
        updated_at=updated_at,
        project_id=project_id,
        acceptance=acceptance,
        enactment_date=enactment_date,
        supplemental_docs=supplemental_docs,
        file=file,
        authorship=authorship,
        project_file_map=project_file_map,
        verification_method=verification_method,
    )

    # Whatever keys remain become extra properties.
    governance_requirement.additional_properties = d
    return governance_requirement
@_attrs_define
class GovernanceRequirementProjectFileMap:
    """Files supplied by each project when authorship is project"""

    additional_properties: Dict[str, "GovernanceFile"] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize every stored GovernanceFile keyed by its property name."""
        return {key: value.to_dict() for key, value in self.additional_properties.items()}

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance, deserializing every entry as a GovernanceFile."""
        from ..models.governance_file import GovernanceFile

        payload = src_dict.copy()
        file_map = cls()
        file_map.additional_properties = {
            key: GovernanceFile.from_dict(raw) for key, raw in payload.items()
        }
        return file_map

    @property
    def additional_keys(self) -> List[str]:
        """Names of all dynamically stored properties."""
        return list(self.additional_properties.keys())
Files supplied by each project when authorship is project
Method generated by attrs for class GovernanceRequirementProjectFileMap.
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Build a GovernanceRequirementProjectFileMap from a plain dict.

    Every key/value pair becomes an additional property whose value is
    deserialized as a GovernanceFile.
    """
    from ..models.governance_file import GovernanceFile

    payload = src_dict.copy()
    file_map = cls()
    file_map.additional_properties = {
        key: GovernanceFile.from_dict(raw) for key, raw in payload.items()
    }
    return file_map
class GovernanceScope(str, Enum):
    """The levels at which a governance requirement can be enforced."""

    PROJECT = "PROJECT"
    TENANT = "TENANT"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Render as the raw wire value rather than the default "Class.MEMBER".
        return self.value

    @classmethod
    def _missing_(cls, value):
        # Unrecognized wire values degrade to UNKNOWN instead of raising.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
class GovernanceTrainingVerification(str, Enum):
    """How completion of a governance training is verified."""

    CERTIFICATE = "CERTIFICATE"
    SELF = "SELF"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Render as the raw wire value rather than the default "Class.MEMBER".
        return self.value

    @classmethod
    def _missing_(cls, value):
        # Unrecognized wire values degrade to UNKNOWN instead of raising.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
class GovernanceType(str, Enum):
    """The types of governance requirements that can be enforced."""

    AGREEMENT = "AGREEMENT"
    DOCUMENT = "DOCUMENT"
    TRAINING = "TRAINING"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Render as the raw wire value rather than the default "Class.MEMBER".
        return self.value

    @classmethod
    def _missing_(cls, value):
        # Unrecognized wire values degrade to UNKNOWN instead of raising.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class ImportDataRequest:
    """
    Attributes:
        name (str): Name of the dataset
        public_ids (List[str]):
        description (Union[Unset, str]): Description of the dataset
        tags (Union[List['Tag'], None, Unset]): List of tags to apply to the dataset
    """

    name: str
    public_ids: List[str]
    description: Union[Unset, str] = UNSET
    tags: Union[List["Tag"], None, Unset] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to the camelCase wire format, omitting unset optional fields."""
        # tags: Unset is kept so it can be omitted below; a list is serialized
        # element-wise; anything else (incl. None) passes through unchanged.
        serialized_tags: Union[List[Dict[str, Any]], None, Unset]
        if isinstance(self.tags, Unset):
            serialized_tags = UNSET
        elif isinstance(self.tags, list):
            serialized_tags = [tag_item.to_dict() for tag_item in self.tags]
        else:
            serialized_tags = self.tags

        # Declared fields are written after additional_properties so they win.
        field_dict: Dict[str, Any] = dict(self.additional_properties)
        field_dict["name"] = self.name
        field_dict["publicIds"] = self.public_ids
        if self.description is not UNSET:
            field_dict["description"] = self.description
        if serialized_tags is not UNSET:
            field_dict["tags"] = serialized_tags

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from the camelCase wire format.

        Unconsumed keys are kept as ``additional_properties``. A tags value
        that cannot be parsed as a list of Tag objects is kept as-is.
        """
        from ..models.tag import Tag

        payload = src_dict.copy()
        name = payload.pop("name")
        public_ids = cast(List[str], payload.pop("publicIds"))
        description = payload.pop("description", UNSET)

        raw_tags = payload.pop("tags", UNSET)
        tags: Union[List["Tag"], None, Unset]
        if raw_tags is None or isinstance(raw_tags, Unset):
            tags = raw_tags
        else:
            try:
                if not isinstance(raw_tags, list):
                    raise TypeError()
                tags = [Tag.from_dict(item) for item in raw_tags]
            except:  # noqa: E722
                # Fall back to the raw value when parsing fails.
                tags = cast(Union[List["Tag"], None, Unset], raw_tags)

        request = cls(
            name=name,
            public_ids=public_ids,
            description=description,
            tags=tags,
        )

        request.additional_properties = payload
        return request

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (non-declared) properties."""
        return list(self.additional_properties.keys())
Attributes:
- name (str): Name of the dataset
- public_ids (List[str]):
- description (Union[Unset, str]): Description of the dataset
- tags (Union[List['Tag'], None, Unset]): List of tags to apply to the dataset
def __init__(self, name, public_ids, description=attr_dict['description'].default, tags=attr_dict['tags'].default):
    """Method generated by attrs for class ImportDataRequest.

    NOTE(review): rendering of attrs-generated code; ``attr_dict`` and
    ``__attr_factory_additional_properties`` exist only in the attrs
    code-generation namespace. Edit the class field declarations, not this.
    """
    self.name = name
    self.public_ids = public_ids
    self.description = description
    self.tags = tags
    # additional_properties is init=False: always created fresh via its factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ImportDataRequest.
def to_dict(self) -> Dict[str, Any]:
    """Serialize an ImportDataRequest to the camelCase wire format.

    Optional fields that are UNSET are omitted; a list of tags is
    serialized element-wise.
    """
    name = self.name

    public_ids = self.public_ids

    description = self.description

    # tags: Unset passes through (omitted below), a list is serialized,
    # anything else (incl. None) passes through raw.
    tags: Union[List[Dict[str, Any]], None, Unset]
    if isinstance(self.tags, Unset):
        tags = UNSET
    elif isinstance(self.tags, list):
        tags = []
        for tags_type_0_item_data in self.tags:
            tags_type_0_item = tags_type_0_item_data.to_dict()
            tags.append(tags_type_0_item)

    else:
        tags = self.tags

    # Declared fields are written after additional_properties so they win.
    field_dict: Dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "name": name,
            "publicIds": public_ids,
        }
    )
    if description is not UNSET:
        field_dict["description"] = description
    if tags is not UNSET:
        field_dict["tags"] = tags

    return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize an ImportDataRequest from the camelCase wire format.

    Unconsumed keys are kept as ``additional_properties``; a tags value that
    cannot be parsed as a list of Tag objects is returned unchanged.
    """
    from ..models.tag import Tag

    # Copy so that popping consumed keys does not mutate the caller's dict.
    d = src_dict.copy()
    name = d.pop("name")

    public_ids = cast(List[str], d.pop("publicIds"))

    description = d.pop("description", UNSET)

    def _parse_tags(data: object) -> Union[List["Tag"], None, Unset]:
        # None / Unset pass straight through.
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, list):
                raise TypeError()
            tags_type_0 = []
            _tags_type_0 = data
            for tags_type_0_item_data in _tags_type_0:
                tags_type_0_item = Tag.from_dict(tags_type_0_item_data)

                tags_type_0.append(tags_type_0_item)

            return tags_type_0
        except:  # noqa: E722
            pass
        # Fallback: return the raw value unchanged.
        return cast(Union[List["Tag"], None, Unset], data)

    tags = _parse_tags(d.pop("tags", UNSET))

    import_data_request = cls(
        name=name,
        public_ids=public_ids,
        description=description,
        tags=tags,
    )

    # Whatever keys remain become extra properties.
    import_data_request.additional_properties = d
    return import_data_request
@_attrs_define
class InviteUserRequest:
    """
    Attributes:
        name (str):
        organization (str):
        email (str):
    """

    name: str
    organization: str
    email: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; declared fields overwrite clashing extras."""
        payload: Dict[str, Any] = dict(self.additional_properties)
        payload["name"] = self.name
        payload["organization"] = self.organization
        payload["email"] = self.email
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a dict; unknown keys become additional properties."""
        leftovers = src_dict.copy()
        request = cls(
            name=leftovers.pop("name"),
            organization=leftovers.pop("organization"),
            email=leftovers.pop("email"),
        )
        request.additional_properties = leftovers
        return request

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (non-declared) properties."""
        return list(self.additional_properties.keys())
Attributes:
- name (str):
- organization (str):
- email (str):
def __init__(self, name, organization, email):
    """Method generated by attrs for class InviteUserRequest.

    NOTE(review): rendering of attrs-generated code;
    ``__attr_factory_additional_properties`` exists only in the attrs
    code-generation namespace. Edit the class field declarations, not this.
    """
    self.name = name
    self.organization = organization
    self.email = email
    # additional_properties is init=False: always created fresh via its factory.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class InviteUserRequest.
def to_dict(self) -> Dict[str, Any]:
    """Serialize an InviteUserRequest to a plain dict.

    Extra properties come first; the declared fields are written afterwards
    so they overwrite any clashing keys.
    """
    payload: Dict[str, Any] = dict(self.additional_properties)
    payload.update(
        {
            "name": self.name,
            "organization": self.organization,
            "email": self.email,
        }
    )
    return payload
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize an InviteUserRequest from a plain dict.

    Unconsumed keys are kept as ``additional_properties``.
    """
    # Copy so that popping consumed keys does not mutate the caller's dict.
    d = src_dict.copy()
    name = d.pop("name")

    organization = d.pop("organization")

    email = d.pop("email")

    invite_user_request = cls(
        name=name,
        organization=organization,
        email=email,
    )

    # Whatever keys remain become extra properties.
    invite_user_request.additional_properties = d
    return invite_user_request
@_attrs_define
class InviteUserResponse:
    """
    Attributes:
        message (str):
    """

    message: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; the declared field overwrites clashing extras."""
        payload: Dict[str, Any] = dict(self.additional_properties)
        payload["message"] = self.message
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a dict; unknown keys become additional properties."""
        leftovers = src_dict.copy()
        response = cls(message=leftovers.pop("message"))
        response.additional_properties = leftovers
        return response

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (non-declared) properties."""
        return list(self.additional_properties.keys())
Attributes:
- message (str):
24def __init__(self, message): 25 self.message = message 26 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class InviteUserResponse.
class ListEventsEntityType(str, Enum):
    """Entity types that audit events can be listed for."""

    BILLINGACCOUNT = "BillingAccount"
    DATASET = "Dataset"
    NOTEBOOKINSTANCE = "NotebookInstance"
    PROCESS = "Process"
    PROJECT = "Project"
    SAMPLE = "Sample"
    USER = "User"
    USERPROJECTASSIGNMENT = "UserProjectAssignment"
    # Fallback for values unknown to this client version; never send it in requests.
    UNKNOWN = "UNKNOWN"

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Map any unrecognized server value onto the UNKNOWN sentinel.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class LogEntry:
    """A single log line with an optional timestamp.

    Attributes:
        message (str):
        timestamp (Union[Unset, int]): UNIX timestamp in milliseconds, might be blank if we don't have this info
    """

    message: str
    timestamp: Union[Unset, int] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, omitting the timestamp when unset."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized["message"] = self.message
        if self.timestamp is not UNSET:
            serialized["timestamp"] = self.timestamp
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a dict; leftover keys become additional_properties."""
        payload = src_dict.copy()
        instance = cls(
            message=payload.pop("message"),
            timestamp=payload.pop("timestamp", UNSET),
        )
        instance.additional_properties = payload
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())
Attributes:
- message (str):
- timestamp (Union[Unset, int]): UNIX timestamp in milliseconds, might be blank if we don't have this info
25def __init__(self, message, timestamp=attr_dict['timestamp'].default): 26 self.message = message 27 self.timestamp = timestamp 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class LogEntry.
24 def to_dict(self) -> Dict[str, Any]: 25 message = self.message 26 27 timestamp = self.timestamp 28 29 field_dict: Dict[str, Any] = {} 30 field_dict.update(self.additional_properties) 31 field_dict.update( 32 { 33 "message": message, 34 } 35 ) 36 if timestamp is not UNSET: 37 field_dict["timestamp"] = timestamp 38 39 return field_dict
41 @classmethod 42 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 43 d = src_dict.copy() 44 message = d.pop("message") 45 46 timestamp = d.pop("timestamp", UNSET) 47 48 log_entry = cls( 49 message=message, 50 timestamp=timestamp, 51 ) 52 53 log_entry.additional_properties = d 54 return log_entry
@_attrs_define
class LoginProvider:
    """An identity provider available for login.

    Attributes:
        id (str):
        name (str):
        description (str):
        logo_url (str):
    """

    id: str
    name: str
    description: str
    logo_url: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; known fields overwrite same-named extras."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized["id"] = self.id
        serialized["name"] = self.name
        serialized["description"] = self.description
        serialized["logoUrl"] = self.logo_url
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a dict; leftover keys become additional_properties."""
        payload = src_dict.copy()
        instance = cls(
            id=payload.pop("id"),
            name=payload.pop("name"),
            description=payload.pop("description"),
            logo_url=payload.pop("logoUrl"),
        )
        instance.additional_properties = payload
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())
Attributes:
- id (str):
- name (str):
- description (str):
- logo_url (str):
27def __init__(self, id, name, description, logo_url): 28 self.id = id 29 self.name = name 30 self.description = description 31 self.logo_url = logo_url 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class LoginProvider.
26 def to_dict(self) -> Dict[str, Any]: 27 id = self.id 28 29 name = self.name 30 31 description = self.description 32 33 logo_url = self.logo_url 34 35 field_dict: Dict[str, Any] = {} 36 field_dict.update(self.additional_properties) 37 field_dict.update( 38 { 39 "id": id, 40 "name": name, 41 "description": description, 42 "logoUrl": logo_url, 43 } 44 ) 45 46 return field_dict
48 @classmethod 49 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 50 d = src_dict.copy() 51 id = d.pop("id") 52 53 name = d.pop("name") 54 55 description = d.pop("description") 56 57 logo_url = d.pop("logoUrl") 58 59 login_provider = cls( 60 id=id, 61 name=name, 62 description=description, 63 logo_url=logo_url, 64 ) 65 66 login_provider.additional_properties = d 67 return login_provider
@_attrs_define
class Message:
    """A message, optionally threaded under a parent message.

    Attributes:
        message_type (MessageType):
        id (str):
        message (str):
        links (List['Entity']):
        has_replies (bool):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
        parent_message_id (Union[None, Unset, str]):
    """

    message_type: MessageType
    id: str
    message: str
    links: List["Entity"]
    has_replies: bool
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    parent_message_id: Union[None, Unset, str] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, omitting parentMessageId when unset."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized["messageType"] = self.message_type.value
        serialized["id"] = self.id
        serialized["message"] = self.message
        serialized["links"] = [link.to_dict() for link in self.links]
        serialized["hasReplies"] = self.has_replies
        serialized["createdBy"] = self.created_by
        serialized["createdAt"] = self.created_at.isoformat()
        serialized["updatedAt"] = self.updated_at.isoformat()
        # parentMessageId is emitted when explicitly set, including an explicit None.
        if not isinstance(self.parent_message_id, Unset):
            serialized["parentMessageId"] = self.parent_message_id
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a dict; leftover keys become additional_properties."""
        from ..models.entity import Entity

        payload = src_dict.copy()
        instance = cls(
            message_type=MessageType(payload.pop("messageType")),
            id=payload.pop("id"),
            message=payload.pop("message"),
            links=[Entity.from_dict(item) for item in payload.pop("links")],
            has_replies=payload.pop("hasReplies"),
            created_by=payload.pop("createdBy"),
            created_at=isoparse(payload.pop("createdAt")),
            updated_at=isoparse(payload.pop("updatedAt")),
            # None, a string, or UNSET (absent) all pass through unchanged.
            parent_message_id=payload.pop("parentMessageId", UNSET),
        )
        instance.additional_properties = payload
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())
Attributes:
- message_type (MessageType):
- id (str):
- message (str):
- links (List['Entity']):
- has_replies (bool):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
- parent_message_id (Union[None, Unset, str]):
32def __init__(self, message_type, id, message, links, has_replies, created_by, created_at, updated_at, parent_message_id=attr_dict['parent_message_id'].default): 33 self.message_type = message_type 34 self.id = id 35 self.message = message 36 self.links = links 37 self.has_replies = has_replies 38 self.created_by = created_by 39 self.created_at = created_at 40 self.updated_at = updated_at 41 self.parent_message_id = parent_message_id 42 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Message.
45 def to_dict(self) -> Dict[str, Any]: 46 message_type = self.message_type.value 47 48 id = self.id 49 50 message = self.message 51 52 links = [] 53 for links_item_data in self.links: 54 links_item = links_item_data.to_dict() 55 links.append(links_item) 56 57 has_replies = self.has_replies 58 59 created_by = self.created_by 60 61 created_at = self.created_at.isoformat() 62 63 updated_at = self.updated_at.isoformat() 64 65 parent_message_id: Union[None, Unset, str] 66 if isinstance(self.parent_message_id, Unset): 67 parent_message_id = UNSET 68 else: 69 parent_message_id = self.parent_message_id 70 71 field_dict: Dict[str, Any] = {} 72 field_dict.update(self.additional_properties) 73 field_dict.update( 74 { 75 "messageType": message_type, 76 "id": id, 77 "message": message, 78 "links": links, 79 "hasReplies": has_replies, 80 "createdBy": created_by, 81 "createdAt": created_at, 82 "updatedAt": updated_at, 83 } 84 ) 85 if parent_message_id is not UNSET: 86 field_dict["parentMessageId"] = parent_message_id 87 88 return field_dict
90 @classmethod 91 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 92 from ..models.entity import Entity 93 94 d = src_dict.copy() 95 message_type = MessageType(d.pop("messageType")) 96 97 id = d.pop("id") 98 99 message = d.pop("message") 100 101 links = [] 102 _links = d.pop("links") 103 for links_item_data in _links: 104 links_item = Entity.from_dict(links_item_data) 105 106 links.append(links_item) 107 108 has_replies = d.pop("hasReplies") 109 110 created_by = d.pop("createdBy") 111 112 created_at = isoparse(d.pop("createdAt")) 113 114 updated_at = isoparse(d.pop("updatedAt")) 115 116 def _parse_parent_message_id(data: object) -> Union[None, Unset, str]: 117 if data is None: 118 return data 119 if isinstance(data, Unset): 120 return data 121 return cast(Union[None, Unset, str], data) 122 123 parent_message_id = _parse_parent_message_id(d.pop("parentMessageId", UNSET)) 124 125 message = cls( 126 message_type=message_type, 127 id=id, 128 message=message, 129 links=links, 130 has_replies=has_replies, 131 created_by=created_by, 132 created_at=created_at, 133 updated_at=updated_at, 134 parent_message_id=parent_message_id, 135 ) 136 137 message.additional_properties = d 138 return message
@_attrs_define
class MessageInput:
    """Payload for creating a message, optionally as a reply.

    Attributes:
        message (str):
        parent_message_id (Union[None, Unset, str]):
    """

    message: str
    parent_message_id: Union[None, Unset, str] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, omitting parentMessageId when unset."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized["message"] = self.message
        # Emitted when explicitly set, including an explicit None.
        if not isinstance(self.parent_message_id, Unset):
            serialized["parentMessageId"] = self.parent_message_id
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a dict; leftover keys become additional_properties."""
        payload = src_dict.copy()
        instance = cls(
            message=payload.pop("message"),
            # None, a string, or UNSET (absent) all pass through unchanged.
            parent_message_id=payload.pop("parentMessageId", UNSET),
        )
        instance.additional_properties = payload
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())
Attributes:
- message (str):
- parent_message_id (Union[None, Unset, str]):
25def __init__(self, message, parent_message_id=attr_dict['parent_message_id'].default): 26 self.message = message 27 self.parent_message_id = parent_message_id 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class MessageInput.
24 def to_dict(self) -> Dict[str, Any]: 25 message = self.message 26 27 parent_message_id: Union[None, Unset, str] 28 if isinstance(self.parent_message_id, Unset): 29 parent_message_id = UNSET 30 else: 31 parent_message_id = self.parent_message_id 32 33 field_dict: Dict[str, Any] = {} 34 field_dict.update(self.additional_properties) 35 field_dict.update( 36 { 37 "message": message, 38 } 39 ) 40 if parent_message_id is not UNSET: 41 field_dict["parentMessageId"] = parent_message_id 42 43 return field_dict
45 @classmethod 46 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 47 d = src_dict.copy() 48 message = d.pop("message") 49 50 def _parse_parent_message_id(data: object) -> Union[None, Unset, str]: 51 if data is None: 52 return data 53 if isinstance(data, Unset): 54 return data 55 return cast(Union[None, Unset, str], data) 56 57 parent_message_id = _parse_parent_message_id(d.pop("parentMessageId", UNSET)) 58 59 message_input = cls( 60 message=message, 61 parent_message_id=parent_message_id, 62 ) 63 64 message_input.additional_properties = d 65 return message_input
class MessageType(str, Enum):
    """Origin of a message: system-generated or user-authored."""

    SYSTEM = "SYSTEM"
    USER = "USER"
    # Fallback for values unknown to this client version; never send it in requests.
    UNKNOWN = "UNKNOWN"

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Map any unrecognized server value onto the UNKNOWN sentinel.
        return cls.UNKNOWN
str(object='') -> str str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class MetricRecord:
    """A single metric datapoint, optionally dated and broken down by service.

    Attributes:
        unit (str):
        date (Union[Unset, datetime.date]): Date in ISO 8601 format
        services (Union[Unset, MetricRecordServices]): Map of service names to metric value Example: {'Amazon Simple
            Storage Service': 24.91}.
    """

    unit: str
    date: Union[Unset, datetime.date] = UNSET
    services: Union[Unset, "MetricRecordServices"] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, omitting unset optional fields."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized["unit"] = self.unit
        if not isinstance(self.date, Unset):
            serialized["date"] = self.date.isoformat()
        if not isinstance(self.services, Unset):
            serialized["services"] = self.services.to_dict()
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a dict; leftover keys become additional_properties."""
        from ..models.metric_record_services import MetricRecordServices

        payload = src_dict.copy()
        unit = payload.pop("unit")

        raw_date = payload.pop("date", UNSET)
        date: Union[Unset, datetime.date]
        date = UNSET if isinstance(raw_date, Unset) else isoparse(raw_date).date()

        raw_services = payload.pop("services", UNSET)
        services: Union[Unset, MetricRecordServices]
        if isinstance(raw_services, Unset):
            services = UNSET
        else:
            services = MetricRecordServices.from_dict(raw_services)

        instance = cls(unit=unit, date=date, services=services)
        instance.additional_properties = payload
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())
Attributes:
- unit (str):
- date (Union[Unset, datetime.date]): Date in ISO 8601 format
- services (Union[Unset, MetricRecordServices]): Map of service names to metric value Example: {'Amazon Simple Storage Service': 24.91}.
26def __init__(self, unit, date=attr_dict['date'].default, services=attr_dict['services'].default): 27 self.unit = unit 28 self.date = date 29 self.services = services 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class MetricRecord.
33 def to_dict(self) -> Dict[str, Any]: 34 unit = self.unit 35 36 date: Union[Unset, str] = UNSET 37 if not isinstance(self.date, Unset): 38 date = self.date.isoformat() 39 40 services: Union[Unset, Dict[str, Any]] = UNSET 41 if not isinstance(self.services, Unset): 42 services = self.services.to_dict() 43 44 field_dict: Dict[str, Any] = {} 45 field_dict.update(self.additional_properties) 46 field_dict.update( 47 { 48 "unit": unit, 49 } 50 ) 51 if date is not UNSET: 52 field_dict["date"] = date 53 if services is not UNSET: 54 field_dict["services"] = services 55 56 return field_dict
58 @classmethod 59 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 60 from ..models.metric_record_services import MetricRecordServices 61 62 d = src_dict.copy() 63 unit = d.pop("unit") 64 65 _date = d.pop("date", UNSET) 66 date: Union[Unset, datetime.date] 67 if isinstance(_date, Unset): 68 date = UNSET 69 else: 70 date = isoparse(_date).date() 71 72 _services = d.pop("services", UNSET) 73 services: Union[Unset, MetricRecordServices] 74 if isinstance(_services, Unset): 75 services = UNSET 76 else: 77 services = MetricRecordServices.from_dict(_services) 78 79 metric_record = cls( 80 unit=unit, 81 date=date, 82 services=services, 83 ) 84 85 metric_record.additional_properties = d 86 return metric_record
@_attrs_define
class MetricRecordServices:
    """Map of service names to metric value

    Example:
        {'Amazon Simple Storage Service': 24.91}

    """

    # Every key/value pair lives here; the model declares no fixed fields.
    additional_properties: Dict[str, float] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Return a shallow copy of the service-name -> value mapping."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance whose mapping is a copy of src_dict."""
        instance = cls()
        instance.additional_properties = src_dict.copy()
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of the mapped services."""
        return list(self.additional_properties.keys())
Map of service names to metric value
Example:
{'Amazon Simple Storage Service': 24.91}
@_attrs_define
class MoveDatasetInput:
    """Request to move a dataset between projects.

    Attributes:
        dataset_id (str):
        source_project_id (str):
        target_project_id (str):
    """

    dataset_id: str
    source_project_id: str
    target_project_id: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; known fields overwrite same-named extras."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized["datasetId"] = self.dataset_id
        serialized["sourceProjectId"] = self.source_project_id
        serialized["targetProjectId"] = self.target_project_id
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a dict; leftover keys become additional_properties."""
        payload = src_dict.copy()
        instance = cls(
            dataset_id=payload.pop("datasetId"),
            source_project_id=payload.pop("sourceProjectId"),
            target_project_id=payload.pop("targetProjectId"),
        )
        instance.additional_properties = payload
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())
Attributes:
- dataset_id (str):
- source_project_id (str):
- target_project_id (str):
26def __init__(self, dataset_id, source_project_id, target_project_id): 27 self.dataset_id = dataset_id 28 self.source_project_id = source_project_id 29 self.target_project_id = target_project_id 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class MoveDatasetInput.
24 def to_dict(self) -> Dict[str, Any]: 25 dataset_id = self.dataset_id 26 27 source_project_id = self.source_project_id 28 29 target_project_id = self.target_project_id 30 31 field_dict: Dict[str, Any] = {} 32 field_dict.update(self.additional_properties) 33 field_dict.update( 34 { 35 "datasetId": dataset_id, 36 "sourceProjectId": source_project_id, 37 "targetProjectId": target_project_id, 38 } 39 ) 40 41 return field_dict
43 @classmethod 44 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 45 d = src_dict.copy() 46 dataset_id = d.pop("datasetId") 47 48 source_project_id = d.pop("sourceProjectId") 49 50 target_project_id = d.pop("targetProjectId") 51 52 move_dataset_input = cls( 53 dataset_id=dataset_id, 54 source_project_id=source_project_id, 55 target_project_id=target_project_id, 56 ) 57 58 move_dataset_input.additional_properties = d 59 return move_dataset_input
@_attrs_define
class MoveDatasetResponse:
    """Result of a dataset move: the S3 commands to run and any skipped samples.

    Attributes:
        s_3_copy_command (str):
        s_3_delete_command (str):
        samples_not_moved (List[str]):
    """

    s_3_copy_command: str
    s_3_delete_command: str
    samples_not_moved: List[str]
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; known fields overwrite same-named extras."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized["s3CopyCommand"] = self.s_3_copy_command
        serialized["s3DeleteCommand"] = self.s_3_delete_command
        serialized["samplesNotMoved"] = self.samples_not_moved
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a dict; leftover keys become additional_properties."""
        payload = src_dict.copy()
        instance = cls(
            s_3_copy_command=payload.pop("s3CopyCommand"),
            s_3_delete_command=payload.pop("s3DeleteCommand"),
            samples_not_moved=cast(List[str], payload.pop("samplesNotMoved")),
        )
        instance.additional_properties = payload
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())
Attributes:
- s_3_copy_command (str):
- s_3_delete_command (str):
- samples_not_moved (List[str]):
26def __init__(self, s_3_copy_command, s_3_delete_command, samples_not_moved): 27 self.s_3_copy_command = s_3_copy_command 28 self.s_3_delete_command = s_3_delete_command 29 self.samples_not_moved = samples_not_moved 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class MoveDatasetResponse.
24 def to_dict(self) -> Dict[str, Any]: 25 s_3_copy_command = self.s_3_copy_command 26 27 s_3_delete_command = self.s_3_delete_command 28 29 samples_not_moved = self.samples_not_moved 30 31 field_dict: Dict[str, Any] = {} 32 field_dict.update(self.additional_properties) 33 field_dict.update( 34 { 35 "s3CopyCommand": s_3_copy_command, 36 "s3DeleteCommand": s_3_delete_command, 37 "samplesNotMoved": samples_not_moved, 38 } 39 ) 40 41 return field_dict
43 @classmethod 44 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 45 d = src_dict.copy() 46 s_3_copy_command = d.pop("s3CopyCommand") 47 48 s_3_delete_command = d.pop("s3DeleteCommand") 49 50 samples_not_moved = cast(List[str], d.pop("samplesNotMoved")) 51 52 move_dataset_response = cls( 53 s_3_copy_command=s_3_copy_command, 54 s_3_delete_command=s_3_delete_command, 55 samples_not_moved=samples_not_moved, 56 ) 57 58 move_dataset_response.additional_properties = d 59 return move_dataset_response
@_attrs_define
class NamedItem:
    """A minimal id/name pair.

    Attributes:
        id (str):
        name (str):
    """

    id: str
    name: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; known fields overwrite same-named extras."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized["id"] = self.id
        serialized["name"] = self.name
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a dict; leftover keys become additional_properties."""
        payload = src_dict.copy()
        instance = cls(id=payload.pop("id"), name=payload.pop("name"))
        instance.additional_properties = payload
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())
Attributes:
- id (str):
- name (str):
25def __init__(self, id, name): 26 self.id = id 27 self.name = name 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class NamedItem.
@_attrs_define
class NotebookInstance:
    """A notebook compute instance and its lifecycle metadata.

    Attributes:
        id (str):
        name (str):
        status (Status):
        status_message (str):
        instance_type (str):
        accelerator_types (List[str]):
        git_repositories (List[str]):
        volume_size_gb (int):
        is_shared_with_project (bool):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
    """

    id: str
    name: str
    status: Status
    status_message: str
    instance_type: str
    accelerator_types: List[str]
    git_repositories: List[str]
    volume_size_gb: int
    is_shared_with_project: bool
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; known fields overwrite same-named extras."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized["id"] = self.id
        serialized["name"] = self.name
        serialized["status"] = self.status.value
        serialized["statusMessage"] = self.status_message
        serialized["instanceType"] = self.instance_type
        serialized["acceleratorTypes"] = self.accelerator_types
        serialized["gitRepositories"] = self.git_repositories
        serialized["volumeSizeGB"] = self.volume_size_gb
        serialized["isSharedWithProject"] = self.is_shared_with_project
        serialized["createdBy"] = self.created_by
        serialized["createdAt"] = self.created_at.isoformat()
        serialized["updatedAt"] = self.updated_at.isoformat()
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a dict; leftover keys become additional_properties."""
        payload = src_dict.copy()
        instance = cls(
            id=payload.pop("id"),
            name=payload.pop("name"),
            status=Status(payload.pop("status")),
            status_message=payload.pop("statusMessage"),
            instance_type=payload.pop("instanceType"),
            accelerator_types=cast(List[str], payload.pop("acceleratorTypes")),
            git_repositories=cast(List[str], payload.pop("gitRepositories")),
            volume_size_gb=payload.pop("volumeSizeGB"),
            is_shared_with_project=payload.pop("isSharedWithProject"),
            created_by=payload.pop("createdBy"),
            created_at=isoparse(payload.pop("createdAt")),
            updated_at=isoparse(payload.pop("updatedAt")),
        )
        instance.additional_properties = payload
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (unmodeled) properties."""
        return list(self.additional_properties.keys())
Attributes:
- id (str):
- name (str):
- status (Status):
- status_message (str):
- instance_type (str):
- accelerator_types (List[str]):
- git_repositories (List[str]):
- volume_size_gb (int):
- is_shared_with_project (bool):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
35def __init__(self, id, name, status, status_message, instance_type, accelerator_types, git_repositories, volume_size_gb, is_shared_with_project, created_by, created_at, updated_at): 36 self.id = id 37 self.name = name 38 self.status = status 39 self.status_message = status_message 40 self.instance_type = instance_type 41 self.accelerator_types = accelerator_types 42 self.git_repositories = git_repositories 43 self.volume_size_gb = volume_size_gb 44 self.is_shared_with_project = is_shared_with_project 45 self.created_by = created_by 46 self.created_at = created_at 47 self.updated_at = updated_at 48 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class NotebookInstance.
46 def to_dict(self) -> Dict[str, Any]: 47 id = self.id 48 49 name = self.name 50 51 status = self.status.value 52 53 status_message = self.status_message 54 55 instance_type = self.instance_type 56 57 accelerator_types = self.accelerator_types 58 59 git_repositories = self.git_repositories 60 61 volume_size_gb = self.volume_size_gb 62 63 is_shared_with_project = self.is_shared_with_project 64 65 created_by = self.created_by 66 67 created_at = self.created_at.isoformat() 68 69 updated_at = self.updated_at.isoformat() 70 71 field_dict: Dict[str, Any] = {} 72 field_dict.update(self.additional_properties) 73 field_dict.update( 74 { 75 "id": id, 76 "name": name, 77 "status": status, 78 "statusMessage": status_message, 79 "instanceType": instance_type, 80 "acceleratorTypes": accelerator_types, 81 "gitRepositories": git_repositories, 82 "volumeSizeGB": volume_size_gb, 83 "isSharedWithProject": is_shared_with_project, 84 "createdBy": created_by, 85 "createdAt": created_at, 86 "updatedAt": updated_at, 87 } 88 ) 89 90 return field_dict
92 @classmethod 93 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 94 d = src_dict.copy() 95 id = d.pop("id") 96 97 name = d.pop("name") 98 99 status = Status(d.pop("status")) 100 101 status_message = d.pop("statusMessage") 102 103 instance_type = d.pop("instanceType") 104 105 accelerator_types = cast(List[str], d.pop("acceleratorTypes")) 106 107 git_repositories = cast(List[str], d.pop("gitRepositories")) 108 109 volume_size_gb = d.pop("volumeSizeGB") 110 111 is_shared_with_project = d.pop("isSharedWithProject") 112 113 created_by = d.pop("createdBy") 114 115 created_at = isoparse(d.pop("createdAt")) 116 117 updated_at = isoparse(d.pop("updatedAt")) 118 119 notebook_instance = cls( 120 id=id, 121 name=name, 122 status=status, 123 status_message=status_message, 124 instance_type=instance_type, 125 accelerator_types=accelerator_types, 126 git_repositories=git_repositories, 127 volume_size_gb=volume_size_gb, 128 is_shared_with_project=is_shared_with_project, 129 created_by=created_by, 130 created_at=created_at, 131 updated_at=updated_at, 132 ) 133 134 notebook_instance.additional_properties = d 135 return notebook_instance
@_attrs_define
class NotebookInstanceStatusResponse:
    """Status payload returned for a notebook instance.

    Attributes:
        status (str):
        status_message (str):
    """

    status: str
    status_message: str
    # Catch-all for wire keys not modeled above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a camelCase dict; modeled fields override extras."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "status": self.status,
                "statusMessage": self.status_message,
            }
        )

        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a camelCase dict, keeping unknown keys."""
        body = src_dict.copy()

        response = cls(
            status=body.pop("status"),
            status_message=body.pop("statusMessage"),
        )

        response.additional_properties = body
        return response

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (un-modeled) keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- status (str):
- status_message (str):
25def __init__(self, status, status_message): 26 self.status = status 27 self.status_message = status_message 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class NotebookInstanceStatusResponse.
22 def to_dict(self) -> Dict[str, Any]: 23 status = self.status 24 25 status_message = self.status_message 26 27 field_dict: Dict[str, Any] = {} 28 field_dict.update(self.additional_properties) 29 field_dict.update( 30 { 31 "status": status, 32 "statusMessage": status_message, 33 } 34 ) 35 36 return field_dict
38 @classmethod 39 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 40 d = src_dict.copy() 41 status = d.pop("status") 42 43 status_message = d.pop("statusMessage") 44 45 notebook_instance_status_response = cls( 46 status=status, 47 status_message=status_message, 48 ) 49 50 notebook_instance_status_response.additional_properties = d 51 return notebook_instance_status_response
@_attrs_define
class OpenNotebookInstanceResponse:
    """Response returned when opening a notebook instance.

    Attributes:
        url (str):
        message (str):
    """

    url: str
    message: str
    # Catch-all for wire keys not modeled above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; modeled fields override extras."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "url": self.url,
                "message": self.message,
            }
        )

        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a plain dict, keeping unknown keys."""
        body = src_dict.copy()

        response = cls(
            url=body.pop("url"),
            message=body.pop("message"),
        )

        response.additional_properties = body
        return response

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (un-modeled) keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- url (str):
- message (str):
25def __init__(self, url, message): 26 self.url = url 27 self.message = message 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class OpenNotebookInstanceResponse.
38 @classmethod 39 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 40 d = src_dict.copy() 41 url = d.pop("url") 42 43 message = d.pop("message") 44 45 open_notebook_instance_response = cls( 46 url=url, 47 message=message, 48 ) 49 50 open_notebook_instance_response.additional_properties = d 51 return open_notebook_instance_response
@_attrs_define
class PaginatedResponseDatasetListDto:
    """One page of Dataset records with a pagination token.

    Attributes:
        data (List['Dataset']):
        next_token (str):
    """

    data: List["Dataset"]
    next_token: str
    # Catch-all for wire keys not modeled above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a camelCase dict, recursively serializing each item."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "data": [item.to_dict() for item in self.data],
                "nextToken": self.next_token,
            }
        )

        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a camelCase dict, keeping unknown keys."""
        from ..models.dataset import Dataset

        body = src_dict.copy()

        page = cls(
            data=[Dataset.from_dict(raw_item) for raw_item in body.pop("data")],
            next_token=body.pop("nextToken"),
        )

        page.additional_properties = body
        return page

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (un-modeled) keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- data (List['Dataset']):
- next_token (str):
25def __init__(self, data, next_token): 26 self.data = data 27 self.next_token = next_token 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PaginatedResponseDatasetListDto.
26 def to_dict(self) -> Dict[str, Any]: 27 data = [] 28 for data_item_data in self.data: 29 data_item = data_item_data.to_dict() 30 data.append(data_item) 31 32 next_token = self.next_token 33 34 field_dict: Dict[str, Any] = {} 35 field_dict.update(self.additional_properties) 36 field_dict.update( 37 { 38 "data": data, 39 "nextToken": next_token, 40 } 41 ) 42 43 return field_dict
45 @classmethod 46 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 47 from ..models.dataset import Dataset 48 49 d = src_dict.copy() 50 data = [] 51 _data = d.pop("data") 52 for data_item_data in _data: 53 data_item = Dataset.from_dict(data_item_data) 54 55 data.append(data_item) 56 57 next_token = d.pop("nextToken") 58 59 paginated_response_dataset_list_dto = cls( 60 data=data, 61 next_token=next_token, 62 ) 63 64 paginated_response_dataset_list_dto.additional_properties = d 65 return paginated_response_dataset_list_dto
@_attrs_define
class PaginatedResponseDiscussion:
    """One page of Discussion records with a pagination token.

    Attributes:
        data (List['Discussion']):
        next_token (str):
    """

    data: List["Discussion"]
    next_token: str
    # Catch-all for wire keys not modeled above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a camelCase dict, recursively serializing each item."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "data": [item.to_dict() for item in self.data],
                "nextToken": self.next_token,
            }
        )

        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a camelCase dict, keeping unknown keys."""
        from ..models.discussion import Discussion

        body = src_dict.copy()

        page = cls(
            data=[Discussion.from_dict(raw_item) for raw_item in body.pop("data")],
            next_token=body.pop("nextToken"),
        )

        page.additional_properties = body
        return page

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (un-modeled) keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- data (List['Discussion']):
- next_token (str):
25def __init__(self, data, next_token): 26 self.data = data 27 self.next_token = next_token 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PaginatedResponseDiscussion.
26 def to_dict(self) -> Dict[str, Any]: 27 data = [] 28 for data_item_data in self.data: 29 data_item = data_item_data.to_dict() 30 data.append(data_item) 31 32 next_token = self.next_token 33 34 field_dict: Dict[str, Any] = {} 35 field_dict.update(self.additional_properties) 36 field_dict.update( 37 { 38 "data": data, 39 "nextToken": next_token, 40 } 41 ) 42 43 return field_dict
45 @classmethod 46 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 47 from ..models.discussion import Discussion 48 49 d = src_dict.copy() 50 data = [] 51 _data = d.pop("data") 52 for data_item_data in _data: 53 data_item = Discussion.from_dict(data_item_data) 54 55 data.append(data_item) 56 57 next_token = d.pop("nextToken") 58 59 paginated_response_discussion = cls( 60 data=data, 61 next_token=next_token, 62 ) 63 64 paginated_response_discussion.additional_properties = d 65 return paginated_response_discussion
@_attrs_define
class PaginatedResponseMessage:
    """One page of Message records with a pagination token.

    Attributes:
        data (List['Message']):
        next_token (str):
    """

    data: List["Message"]
    next_token: str
    # Catch-all for wire keys not modeled above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a camelCase dict, recursively serializing each item."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "data": [item.to_dict() for item in self.data],
                "nextToken": self.next_token,
            }
        )

        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a camelCase dict, keeping unknown keys."""
        from ..models.message import Message

        body = src_dict.copy()

        page = cls(
            data=[Message.from_dict(raw_item) for raw_item in body.pop("data")],
            next_token=body.pop("nextToken"),
        )

        page.additional_properties = body
        return page

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (un-modeled) keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- data (List['Message']):
- next_token (str):
25def __init__(self, data, next_token): 26 self.data = data 27 self.next_token = next_token 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PaginatedResponseMessage.
26 def to_dict(self) -> Dict[str, Any]: 27 data = [] 28 for data_item_data in self.data: 29 data_item = data_item_data.to_dict() 30 data.append(data_item) 31 32 next_token = self.next_token 33 34 field_dict: Dict[str, Any] = {} 35 field_dict.update(self.additional_properties) 36 field_dict.update( 37 { 38 "data": data, 39 "nextToken": next_token, 40 } 41 ) 42 43 return field_dict
45 @classmethod 46 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 47 from ..models.message import Message 48 49 d = src_dict.copy() 50 data = [] 51 _data = d.pop("data") 52 for data_item_data in _data: 53 data_item = Message.from_dict(data_item_data) 54 55 data.append(data_item) 56 57 next_token = d.pop("nextToken") 58 59 paginated_response_message = cls( 60 data=data, 61 next_token=next_token, 62 ) 63 64 paginated_response_message.additional_properties = d 65 return paginated_response_message
@_attrs_define
class PaginatedResponseSampleDto:
    """One page of Sample records with a pagination token.

    Attributes:
        data (List['Sample']):
        next_token (str):
    """

    data: List["Sample"]
    next_token: str
    # Catch-all for wire keys not modeled above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a camelCase dict, recursively serializing each item."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "data": [item.to_dict() for item in self.data],
                "nextToken": self.next_token,
            }
        )

        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a camelCase dict, keeping unknown keys."""
        from ..models.sample import Sample

        body = src_dict.copy()

        page = cls(
            data=[Sample.from_dict(raw_item) for raw_item in body.pop("data")],
            next_token=body.pop("nextToken"),
        )

        page.additional_properties = body
        return page

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (un-modeled) keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- data (List['Sample']):
- next_token (str):
25def __init__(self, data, next_token): 26 self.data = data 27 self.next_token = next_token 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PaginatedResponseSampleDto.
26 def to_dict(self) -> Dict[str, Any]: 27 data = [] 28 for data_item_data in self.data: 29 data_item = data_item_data.to_dict() 30 data.append(data_item) 31 32 next_token = self.next_token 33 34 field_dict: Dict[str, Any] = {} 35 field_dict.update(self.additional_properties) 36 field_dict.update( 37 { 38 "data": data, 39 "nextToken": next_token, 40 } 41 ) 42 43 return field_dict
45 @classmethod 46 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 47 from ..models.sample import Sample 48 49 d = src_dict.copy() 50 data = [] 51 _data = d.pop("data") 52 for data_item_data in _data: 53 data_item = Sample.from_dict(data_item_data) 54 55 data.append(data_item) 56 57 next_token = d.pop("nextToken") 58 59 paginated_response_sample_dto = cls( 60 data=data, 61 next_token=next_token, 62 ) 63 64 paginated_response_sample_dto.additional_properties = d 65 return paginated_response_sample_dto
@_attrs_define
class PaginatedResponseUserDto:
    """One page of User records with a pagination token.

    Attributes:
        data (List['User']):
        next_token (str):
    """

    data: List["User"]
    next_token: str
    # Catch-all for wire keys not modeled above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a camelCase dict, recursively serializing each item."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "data": [item.to_dict() for item in self.data],
                "nextToken": self.next_token,
            }
        )

        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a camelCase dict, keeping unknown keys."""
        from ..models.user import User

        body = src_dict.copy()

        page = cls(
            data=[User.from_dict(raw_item) for raw_item in body.pop("data")],
            next_token=body.pop("nextToken"),
        )

        page.additional_properties = body
        return page

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (un-modeled) keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- data (List['User']):
- next_token (str):
25def __init__(self, data, next_token): 26 self.data = data 27 self.next_token = next_token 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PaginatedResponseUserDto.
26 def to_dict(self) -> Dict[str, Any]: 27 data = [] 28 for data_item_data in self.data: 29 data_item = data_item_data.to_dict() 30 data.append(data_item) 31 32 next_token = self.next_token 33 34 field_dict: Dict[str, Any] = {} 35 field_dict.update(self.additional_properties) 36 field_dict.update( 37 { 38 "data": data, 39 "nextToken": next_token, 40 } 41 ) 42 43 return field_dict
45 @classmethod 46 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 47 from ..models.user import User 48 49 d = src_dict.copy() 50 data = [] 51 _data = d.pop("data") 52 for data_item_data in _data: 53 data_item = User.from_dict(data_item_data) 54 55 data.append(data_item) 56 57 next_token = d.pop("nextToken") 58 59 paginated_response_user_dto = cls( 60 data=data, 61 next_token=next_token, 62 ) 63 64 paginated_response_user_dto.additional_properties = d 65 return paginated_response_user_dto
@_attrs_define
class PipelineCode:
    """Used to describe the pipeline analysis code, not required for ingest processes

    Attributes:
        repository_path (str): GitHub repository which contains the workflow code Example: nf-core/rnaseq.
        version (str): Branch, tag, or commit hash of the pipeline code Example: main.
        repository_type (RepositoryType): Type of repository
        entry_point (str): Main script for running the pipeline Example: main.nf.
    """

    repository_path: str
    version: str
    repository_type: RepositoryType
    entry_point: str
    # Catch-all for wire keys not modeled above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a camelCase dict; modeled fields override extras."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "repositoryPath": self.repository_path,
                "version": self.version,
                # Enum member -> raw wire value.
                "repositoryType": self.repository_type.value,
                "entryPoint": self.entry_point,
            }
        )

        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a camelCase dict, keeping unknown keys."""
        body = src_dict.copy()

        pipeline_code = cls(
            repository_path=body.pop("repositoryPath"),
            version=body.pop("version"),
            # Raw wire value -> enum member.
            repository_type=RepositoryType(body.pop("repositoryType")),
            entry_point=body.pop("entryPoint"),
        )

        pipeline_code.additional_properties = body
        return pipeline_code

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (un-modeled) keys carried by this instance."""
        return list(self.additional_properties.keys())
Used to describe the pipeline analysis code, not required for ingest processes
Attributes:
- repository_path (str): GitHub repository which contains the workflow code Example: nf-core/rnaseq.
- version (str): Branch, tag, or commit hash of the pipeline code Example: main.
- repository_type (RepositoryType): Type of repository
- entry_point (str): Main script for running the pipeline Example: main.nf.
27def __init__(self, repository_path, version, repository_type, entry_point): 28 self.repository_path = repository_path 29 self.version = version 30 self.repository_type = repository_type 31 self.entry_point = entry_point 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PipelineCode.
29 def to_dict(self) -> Dict[str, Any]: 30 repository_path = self.repository_path 31 32 version = self.version 33 34 repository_type = self.repository_type.value 35 36 entry_point = self.entry_point 37 38 field_dict: Dict[str, Any] = {} 39 field_dict.update(self.additional_properties) 40 field_dict.update( 41 { 42 "repositoryPath": repository_path, 43 "version": version, 44 "repositoryType": repository_type, 45 "entryPoint": entry_point, 46 } 47 ) 48 49 return field_dict
51 @classmethod 52 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 53 d = src_dict.copy() 54 repository_path = d.pop("repositoryPath") 55 56 version = d.pop("version") 57 58 repository_type = RepositoryType(d.pop("repositoryType")) 59 60 entry_point = d.pop("entryPoint") 61 62 pipeline_code = cls( 63 repository_path=repository_path, 64 version=version, 65 repository_type=repository_type, 66 entry_point=entry_point, 67 ) 68 69 pipeline_code.additional_properties = d 70 return pipeline_code
@_attrs_define
class PipelineCost:
    """Cost details reported for a pipeline run.

    Attributes:
        total_cost (Union[None, Unset, float]): The total cost of running the pipeline
        is_estimate (Union[Unset, bool]): Is this an estimate of the cost?
        description (Union[Unset, str]): Description of the cost calculation
    """

    total_cost: Union[None, Unset, float] = UNSET
    is_estimate: Union[Unset, bool] = UNSET
    description: Union[Unset, str] = UNSET
    # Catch-all for wire keys not modeled above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a camelCase dict.

        Every modeled field is optional; a field whose value is UNSET is
        omitted from the output entirely (None is emitted as null).
        """
        total_cost: Union[None, Unset, float]
        if isinstance(self.total_cost, Unset):
            total_cost = UNSET
        else:
            total_cost = self.total_cost

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        # NOTE: the original generated code had a dead `field_dict.update({})`
        # here; it was a no-op and has been removed.
        if total_cost is not UNSET:
            field_dict["totalCost"] = total_cost
        if self.is_estimate is not UNSET:
            field_dict["isEstimate"] = self.is_estimate
        if self.description is not UNSET:
            field_dict["description"] = self.description

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a camelCase dict; missing keys become UNSET."""
        d = src_dict.copy()

        def _parse_total_cost(data: object) -> Union[None, Unset, float]:
            # None (explicit null) and UNSET (absent) pass through untouched;
            # any other value is the numeric cost.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, float], data)

        pipeline_cost = cls(
            total_cost=_parse_total_cost(d.pop("totalCost", UNSET)),
            is_estimate=d.pop("isEstimate", UNSET),
            description=d.pop("description", UNSET),
        )

        pipeline_cost.additional_properties = d
        return pipeline_cost

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (un-modeled) keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- total_cost (Union[None, Unset, float]): The total cost of running the pipeline
- is_estimate (Union[Unset, bool]): Is this an estimate of the cost?
- description (Union[Unset, str]): Description of the cost calculation
26def __init__(self, total_cost=attr_dict['total_cost'].default, is_estimate=attr_dict['is_estimate'].default, description=attr_dict['description'].default): 27 self.total_cost = total_cost 28 self.is_estimate = is_estimate 29 self.description = description 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PipelineCost.
26 def to_dict(self) -> Dict[str, Any]: 27 total_cost: Union[None, Unset, float] 28 if isinstance(self.total_cost, Unset): 29 total_cost = UNSET 30 else: 31 total_cost = self.total_cost 32 33 is_estimate = self.is_estimate 34 35 description = self.description 36 37 field_dict: Dict[str, Any] = {} 38 field_dict.update(self.additional_properties) 39 field_dict.update({}) 40 if total_cost is not UNSET: 41 field_dict["totalCost"] = total_cost 42 if is_estimate is not UNSET: 43 field_dict["isEstimate"] = is_estimate 44 if description is not UNSET: 45 field_dict["description"] = description 46 47 return field_dict
49 @classmethod 50 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 51 d = src_dict.copy() 52 53 def _parse_total_cost(data: object) -> Union[None, Unset, float]: 54 if data is None: 55 return data 56 if isinstance(data, Unset): 57 return data 58 return cast(Union[None, Unset, float], data) 59 60 total_cost = _parse_total_cost(d.pop("totalCost", UNSET)) 61 62 is_estimate = d.pop("isEstimate", UNSET) 63 64 description = d.pop("description", UNSET) 65 66 pipeline_cost = cls( 67 total_cost=total_cost, 68 is_estimate=is_estimate, 69 description=description, 70 ) 71 72 pipeline_cost.additional_properties = d 73 return pipeline_cost
@_attrs_define
class PortalErrorResponse:
    """Error envelope returned by the portal API.

    Attributes:
        status_code (int):
        error_code (str):
        error_detail (str):
        errors (List['ErrorMessage']):
    """

    status_code: int
    error_code: str
    error_detail: str
    errors: List["ErrorMessage"]
    # Catch-all for wire keys not modeled above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a camelCase dict, recursively serializing each error."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "statusCode": self.status_code,
                "errorCode": self.error_code,
                "errorDetail": self.error_detail,
                "errors": [err.to_dict() for err in self.errors],
            }
        )

        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a camelCase dict, keeping unknown keys."""
        from ..models.error_message import ErrorMessage

        body = src_dict.copy()

        status_code = body.pop("statusCode")
        error_code = body.pop("errorCode")
        error_detail = body.pop("errorDetail")
        errors = [ErrorMessage.from_dict(raw) for raw in body.pop("errors")]

        response = cls(
            status_code=status_code,
            error_code=error_code,
            error_detail=error_detail,
            errors=errors,
        )

        response.additional_properties = body
        return response

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (un-modeled) keys carried by this instance."""
        return list(self.additional_properties.keys())
Attributes:
- status_code (int):
- error_code (str):
- error_detail (str):
- errors (List['ErrorMessage']):
27def __init__(self, status_code, error_code, error_detail, errors): 28 self.status_code = status_code 29 self.error_code = error_code 30 self.error_detail = error_detail 31 self.errors = errors 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class PortalErrorResponse.
30 def to_dict(self) -> Dict[str, Any]: 31 status_code = self.status_code 32 33 error_code = self.error_code 34 35 error_detail = self.error_detail 36 37 errors = [] 38 for errors_item_data in self.errors: 39 errors_item = errors_item_data.to_dict() 40 errors.append(errors_item) 41 42 field_dict: Dict[str, Any] = {} 43 field_dict.update(self.additional_properties) 44 field_dict.update( 45 { 46 "statusCode": status_code, 47 "errorCode": error_code, 48 "errorDetail": error_detail, 49 "errors": errors, 50 } 51 ) 52 53 return field_dict
55 @classmethod 56 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 57 from ..models.error_message import ErrorMessage 58 59 d = src_dict.copy() 60 status_code = d.pop("statusCode") 61 62 error_code = d.pop("errorCode") 63 64 error_detail = d.pop("errorDetail") 65 66 errors = [] 67 _errors = d.pop("errors") 68 for errors_item_data in _errors: 69 errors_item = ErrorMessage.from_dict(errors_item_data) 70 71 errors.append(errors_item) 72 73 portal_error_response = cls( 74 status_code=status_code, 75 error_code=error_code, 76 error_detail=error_detail, 77 errors=errors, 78 ) 79 80 portal_error_response.additional_properties = d 81 return portal_error_response
@_attrs_define
class Process:
    """Identifies a data type or pipeline in Cirro

    Attributes:
        id (str): Unique ID of the Process Example: process-hutch-magic_flute-1_0.
        name (str): Friendly name for the process Example: MAGeCK Flute.
        description (str): Description of the process Example: MAGeCK Flute enables accurate identification of essential
            genes with their related biological functions.
        data_type (str): Name of the data type this pipeline produces (if it is not defined, use the name)
        executor (Executor): How the workflow is executed
        child_process_ids (List[str]): IDs of pipelines that can be run downstream
        parent_process_ids (List[str]): IDs of processes that can run this pipeline
        linked_project_ids (List[str]): Projects that can run this process
        is_tenant_wide (bool): Whether the process is shared with the tenant
        allow_multiple_sources (bool): Whether the pipeline is allowed to have multiple dataset sources
        uses_sample_sheet (bool): Whether the pipeline uses the Cirro-provided sample sheet
        is_archived (bool): Whether the process is marked as archived
        category (Union[Unset, str]): Category of the process Example: Microbial Analysis.
        pipeline_type (Union[Unset, str]): Type of pipeline Example: nf-core.
        documentation_url (Union[Unset, str]): Link to process documentation Example:
            https://docs.cirro.bio/pipelines/catalog_targeted_sequencing/#crispr-screen-analysis.
        file_requirements_message (Union[Unset, str]): Description of the files to be uploaded (optional)
        owner (Union[None, Unset, str]): Username of the pipeline creator (blank if Cirro curated)
        created_at (Union[Unset, datetime.datetime]): When the process was created (does not reflect the pipeline code)
        updated_at (Union[Unset, datetime.datetime]): When the process was updated (does not reflect the pipeline code)
    """

    id: str
    name: str
    description: str
    data_type: str
    executor: Executor
    child_process_ids: List[str]
    parent_process_ids: List[str]
    linked_project_ids: List[str]
    is_tenant_wide: bool
    allow_multiple_sources: bool
    uses_sample_sheet: bool
    is_archived: bool
    category: Union[Unset, str] = UNSET
    pipeline_type: Union[Unset, str] = UNSET
    documentation_url: Union[Unset, str] = UNSET
    file_requirements_message: Union[Unset, str] = UNSET
    owner: Union[None, Unset, str] = UNSET
    created_at: Union[Unset, datetime.datetime] = UNSET
    updated_at: Union[Unset, datetime.datetime] = UNSET
    # Catch-all for wire keys not modeled above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a camelCase dict.

        Required fields are always emitted; optional fields are emitted only
        when they are not UNSET. Timestamps become ISO-8601 strings.
        """
        owner: Union[None, Unset, str]
        if isinstance(self.owner, Unset):
            owner = UNSET
        else:
            owner = self.owner

        created_at: Union[Unset, str] = UNSET
        if not isinstance(self.created_at, Unset):
            created_at = self.created_at.isoformat()

        updated_at: Union[Unset, str] = UNSET
        if not isinstance(self.updated_at, Unset):
            updated_at = self.updated_at.isoformat()

        field_dict: Dict[str, Any] = dict(self.additional_properties)
        field_dict.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "dataType": self.data_type,
                # Enum member -> raw wire value.
                "executor": self.executor.value,
                "childProcessIds": self.child_process_ids,
                "parentProcessIds": self.parent_process_ids,
                "linkedProjectIds": self.linked_project_ids,
                "isTenantWide": self.is_tenant_wide,
                "allowMultipleSources": self.allow_multiple_sources,
                "usesSampleSheet": self.uses_sample_sheet,
                "isArchived": self.is_archived,
            }
        )
        # Optional fields: emit only the ones that are actually set.
        for key, value in (
            ("category", self.category),
            ("pipelineType", self.pipeline_type),
            ("documentationUrl", self.documentation_url),
            ("fileRequirementsMessage", self.file_requirements_message),
            ("owner", owner),
            ("createdAt", created_at),
            ("updatedAt", updated_at),
        ):
            if value is not UNSET:
                field_dict[key] = value

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a camelCase dict; optional keys default to UNSET."""
        body = src_dict.copy()

        def _parse_owner(data: object) -> Union[None, Unset, str]:
            # None (explicit null) and UNSET (absent) pass through untouched;
            # any other value is the username string.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        def _parse_timestamp(raw: Any) -> Union[Unset, datetime.datetime]:
            # Absent timestamps stay UNSET; present ones are ISO-8601 strings.
            return UNSET if isinstance(raw, Unset) else isoparse(raw)

        process = cls(
            id=body.pop("id"),
            name=body.pop("name"),
            description=body.pop("description"),
            data_type=body.pop("dataType"),
            # Raw wire value -> enum member.
            executor=Executor(body.pop("executor")),
            child_process_ids=cast(List[str], body.pop("childProcessIds")),
            parent_process_ids=cast(List[str], body.pop("parentProcessIds")),
            linked_project_ids=cast(List[str], body.pop("linkedProjectIds")),
            is_tenant_wide=body.pop("isTenantWide"),
            allow_multiple_sources=body.pop("allowMultipleSources"),
            uses_sample_sheet=body.pop("usesSampleSheet"),
            is_archived=body.pop("isArchived"),
            category=body.pop("category", UNSET),
            pipeline_type=body.pop("pipelineType", UNSET),
            documentation_url=body.pop("documentationUrl", UNSET),
            file_requirements_message=body.pop("fileRequirementsMessage", UNSET),
            owner=_parse_owner(body.pop("owner", UNSET)),
            created_at=_parse_timestamp(body.pop("createdAt", UNSET)),
            updated_at=_parse_timestamp(body.pop("updatedAt", UNSET)),
        )

        process.additional_properties = body
        return process

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (un-modeled) keys carried by this instance."""
        return list(self.additional_properties.keys())
Identifies a data type or pipeline in Cirro
Attributes:
- id (str): Unique ID of the Process Example: process-hutch-magic_flute-1_0.
- name (str): Friendly name for the process Example: MAGeCK Flute.
- description (str): Description of the process Example: MAGeCK Flute enables accurate identification of essential genes with their related biological functions.
- data_type (str): Name of the data type this pipeline produces (if it is not defined, use the name)
- executor (Executor): How the workflow is executed
- child_process_ids (List[str]): IDs of pipelines that can be run downstream
- parent_process_ids (List[str]): IDs of processes that can run this pipeline
- linked_project_ids (List[str]): Projects that can run this process
- is_tenant_wide (bool): Whether the process is shared with the tenant
- allow_multiple_sources (bool): Whether the pipeline is allowed to have multiple dataset sources
- uses_sample_sheet (bool): Whether the pipeline uses the Cirro-provided sample sheet
- is_archived (bool): Whether the process is marked as archived
- category (Union[Unset, str]): Category of the process Example: Microbial Analysis.
- pipeline_type (Union[Unset, str]): Type of pipeline Example: nf-core.
- documentation_url (Union[Unset, str]): Link to process documentation Example: https://docs.cirro.bio/pipelines/catalog_targeted_sequencing/#crispr-screen-analysis.
- file_requirements_message (Union[Unset, str]): Description of the files to be uploaded (optional)
- owner (Union[None, Unset, str]): Username of the pipeline creator (blank if Cirro curated)
- created_at (Union[Unset, datetime.datetime]): When the process was created (does not reflect the pipeline code)
- updated_at (Union[Unset, datetime.datetime]): When the process was updated (does not reflect the pipeline code)
def __init__(self, id, name, description, data_type, executor, child_process_ids, parent_process_ids, linked_project_ids, is_tenant_wide, allow_multiple_sources, uses_sample_sheet, is_archived, category=attr_dict['category'].default, pipeline_type=attr_dict['pipeline_type'].default, documentation_url=attr_dict['documentation_url'].default, file_requirements_message=attr_dict['file_requirements_message'].default, owner=attr_dict['owner'].default, created_at=attr_dict['created_at'].default, updated_at=attr_dict['updated_at'].default):
    """Method generated by attrs for class Process."""
    # NOTE(review): this is the attrs-synthesized __init__ as rendered by the
    # documentation tool; `attr_dict` and `__attr_factory_additional_properties`
    # are attrs internals, not names from the hand-written source — do not
    # reference them directly. Optional fields default to UNSET sentinels
    # (their attrs field defaults) — TODO confirm against the class definition.
    self.id = id
    self.name = name
    self.description = description
    self.data_type = data_type
    self.executor = executor
    self.child_process_ids = child_process_ids
    self.parent_process_ids = parent_process_ids
    self.linked_project_ids = linked_project_ids
    self.is_tenant_wide = is_tenant_wide
    self.allow_multiple_sources = allow_multiple_sources
    self.uses_sample_sheet = uses_sample_sheet
    self.is_archived = is_archived
    self.category = category
    self.pipeline_type = pipeline_type
    self.documentation_url = documentation_url
    self.file_requirements_message = file_requirements_message
    self.owner = owner
    self.created_at = created_at
    self.updated_at = updated_at
    # Holds any extra keys from the API payload not mapped to a declared field.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Process.
def to_dict(self) -> Dict[str, Any]:
    """Serialize this Process into a JSON-ready dict using the API's camelCase keys.

    Required fields are always emitted; optional fields are emitted only when
    they are not the UNSET sentinel. Unknown keys captured in
    ``additional_properties`` are merged in first, so declared fields win on
    key collisions.
    """
    id = self.id

    name = self.name

    description = self.description

    data_type = self.data_type

    # Enum member is serialized by its underlying value.
    executor = self.executor.value

    child_process_ids = self.child_process_ids

    parent_process_ids = self.parent_process_ids

    linked_project_ids = self.linked_project_ids

    is_tenant_wide = self.is_tenant_wide

    allow_multiple_sources = self.allow_multiple_sources

    uses_sample_sheet = self.uses_sample_sheet

    is_archived = self.is_archived

    category = self.category

    pipeline_type = self.pipeline_type

    documentation_url = self.documentation_url

    file_requirements_message = self.file_requirements_message

    # owner is a str/None/UNSET union; pass through unless unset.
    owner: Union[None, Unset, str]
    if isinstance(self.owner, Unset):
        owner = UNSET
    else:
        owner = self.owner

    # Datetimes are emitted as ISO-8601 strings.
    created_at: Union[Unset, str] = UNSET
    if not isinstance(self.created_at, Unset):
        created_at = self.created_at.isoformat()

    updated_at: Union[Unset, str] = UNSET
    if not isinstance(self.updated_at, Unset):
        updated_at = self.updated_at.isoformat()

    field_dict: Dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "id": id,
            "name": name,
            "description": description,
            "dataType": data_type,
            "executor": executor,
            "childProcessIds": child_process_ids,
            "parentProcessIds": parent_process_ids,
            "linkedProjectIds": linked_project_ids,
            "isTenantWide": is_tenant_wide,
            "allowMultipleSources": allow_multiple_sources,
            "usesSampleSheet": uses_sample_sheet,
            "isArchived": is_archived,
        }
    )
    # Optional fields: omit entirely when unset rather than emitting null.
    if category is not UNSET:
        field_dict["category"] = category
    if pipeline_type is not UNSET:
        field_dict["pipelineType"] = pipeline_type
    if documentation_url is not UNSET:
        field_dict["documentationUrl"] = documentation_url
    if file_requirements_message is not UNSET:
        field_dict["fileRequirementsMessage"] = file_requirements_message
    if owner is not UNSET:
        field_dict["owner"] = owner
    if created_at is not UNSET:
        field_dict["createdAt"] = created_at
    if updated_at is not UNSET:
        field_dict["updatedAt"] = updated_at

    return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Build a Process from an API response dict.

    Required keys raise KeyError when absent; optional keys fall back to
    UNSET. Keys left over after popping the declared fields are preserved
    in ``additional_properties``.
    """
    # Copy so the caller's dict is not mutated by the pops below.
    d = src_dict.copy()
    id = d.pop("id")

    name = d.pop("name")

    description = d.pop("description")

    data_type = d.pop("dataType")

    executor = Executor(d.pop("executor"))

    child_process_ids = cast(List[str], d.pop("childProcessIds"))

    parent_process_ids = cast(List[str], d.pop("parentProcessIds"))

    linked_project_ids = cast(List[str], d.pop("linkedProjectIds"))

    is_tenant_wide = d.pop("isTenantWide")

    allow_multiple_sources = d.pop("allowMultipleSources")

    uses_sample_sheet = d.pop("usesSampleSheet")

    is_archived = d.pop("isArchived")

    category = d.pop("category", UNSET)

    pipeline_type = d.pop("pipelineType", UNSET)

    documentation_url = d.pop("documentationUrl", UNSET)

    file_requirements_message = d.pop("fileRequirementsMessage", UNSET)

    def _parse_owner(data: object) -> Union[None, Unset, str]:
        # Pass None and UNSET through unchanged; anything else is treated as str.
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(Union[None, Unset, str], data)

    owner = _parse_owner(d.pop("owner", UNSET))

    # ISO-8601 timestamps; only parsed when the key was present.
    _created_at = d.pop("createdAt", UNSET)
    created_at: Union[Unset, datetime.datetime]
    if isinstance(_created_at, Unset):
        created_at = UNSET
    else:
        created_at = isoparse(_created_at)

    _updated_at = d.pop("updatedAt", UNSET)
    updated_at: Union[Unset, datetime.datetime]
    if isinstance(_updated_at, Unset):
        updated_at = UNSET
    else:
        updated_at = isoparse(_updated_at)

    process = cls(
        id=id,
        name=name,
        description=description,
        data_type=data_type,
        executor=executor,
        child_process_ids=child_process_ids,
        parent_process_ids=parent_process_ids,
        linked_project_ids=linked_project_ids,
        is_tenant_wide=is_tenant_wide,
        allow_multiple_sources=allow_multiple_sources,
        uses_sample_sheet=uses_sample_sheet,
        is_archived=is_archived,
        category=category,
        pipeline_type=pipeline_type,
        documentation_url=documentation_url,
        file_requirements_message=file_requirements_message,
        owner=owner,
        created_at=created_at,
        updated_at=updated_at,
    )

    # Whatever remains in d are unrecognized keys; keep them round-trippable.
    process.additional_properties = d
    return process
@_attrs_define
class ProcessDetail:
    """Identifies a data type or pipeline in Cirro

    Attributes:
        id (str): Unique ID of the Process Example: process-hutch-magic_flute-1_0.
        name (str): Friendly name for the process Example: MAGeCK Flute.
        description (str): Description of the process Example: MAGeCK Flute enables accurate identification of essential
            genes with their related biological functions.
        data_type (str): Name of the data type this pipeline produces (if it is not defined, use the name)
        executor (Executor): How the workflow is executed
        child_process_ids (List[str]): IDs of pipelines that can be run downstream
        parent_process_ids (List[str]): IDs of processes that can run this pipeline
        linked_project_ids (List[str]): Projects that can run this process
        is_tenant_wide (bool): Whether the process is shared with the tenant
        allow_multiple_sources (bool): Whether the pipeline is allowed to have multiple dataset sources
        uses_sample_sheet (bool): Whether the pipeline uses the Cirro-provided sample sheet
        is_archived (bool): Whether the process is marked as archived
        category (Union[Unset, str]): Category of the process Example: Microbial Analysis.
        pipeline_type (Union[Unset, str]): Type of pipeline Example: nf-core.
        documentation_url (Union[Unset, str]): Link to process documentation Example:
            https://docs.cirro.bio/pipelines/catalog_targeted_sequencing/#crispr-screen-analysis.
        file_requirements_message (Union[Unset, str]): Description of the files to be uploaded (optional)
        pipeline_code (Union['PipelineCode', None, Unset]):
        owner (Union[None, Unset, str]): Username of the pipeline creator (blank if Cirro curated)
        custom_settings (Union['CustomPipelineSettings', None, Unset]):
        file_mapping_rules (Union[List['FileMappingRule'], None, Unset]):
        created_at (Union[Unset, datetime.datetime]): When the process was created (does not reflect the pipeline code)
        updated_at (Union[Unset, datetime.datetime]): When the process was updated (does not reflect the pipeline code)
    """

    id: str
    name: str
    description: str
    data_type: str
    executor: Executor
    child_process_ids: List[str]
    parent_process_ids: List[str]
    linked_project_ids: List[str]
    is_tenant_wide: bool
    allow_multiple_sources: bool
    uses_sample_sheet: bool
    is_archived: bool
    # Optional fields default to the UNSET sentinel so "absent" is
    # distinguishable from an explicit None/value.
    category: Union[Unset, str] = UNSET
    pipeline_type: Union[Unset, str] = UNSET
    documentation_url: Union[Unset, str] = UNSET
    file_requirements_message: Union[Unset, str] = UNSET
    pipeline_code: Union["PipelineCode", None, Unset] = UNSET
    owner: Union[None, Unset, str] = UNSET
    custom_settings: Union["CustomPipelineSettings", None, Unset] = UNSET
    file_mapping_rules: Union[List["FileMappingRule"], None, Unset] = UNSET
    created_at: Union[Unset, datetime.datetime] = UNSET
    updated_at: Union[Unset, datetime.datetime] = UNSET
    # Extra payload keys not matching any declared field (round-trip support).
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict with the API's camelCase keys.

        Optional fields are omitted while UNSET; nested models are recursively
        serialized via their own ``to_dict``.
        """
        # Local imports avoid circular imports between generated model modules.
        from ..models.custom_pipeline_settings import CustomPipelineSettings
        from ..models.pipeline_code import PipelineCode

        id = self.id

        name = self.name

        description = self.description

        data_type = self.data_type

        executor = self.executor.value

        child_process_ids = self.child_process_ids

        parent_process_ids = self.parent_process_ids

        linked_project_ids = self.linked_project_ids

        is_tenant_wide = self.is_tenant_wide

        allow_multiple_sources = self.allow_multiple_sources

        uses_sample_sheet = self.uses_sample_sheet

        is_archived = self.is_archived

        category = self.category

        pipeline_type = self.pipeline_type

        documentation_url = self.documentation_url

        file_requirements_message = self.file_requirements_message

        # Union fields: serialize model instances, pass through None/raw values.
        pipeline_code: Union[Dict[str, Any], None, Unset]
        if isinstance(self.pipeline_code, Unset):
            pipeline_code = UNSET
        elif isinstance(self.pipeline_code, PipelineCode):
            pipeline_code = self.pipeline_code.to_dict()
        else:
            pipeline_code = self.pipeline_code

        owner: Union[None, Unset, str]
        if isinstance(self.owner, Unset):
            owner = UNSET
        else:
            owner = self.owner

        custom_settings: Union[Dict[str, Any], None, Unset]
        if isinstance(self.custom_settings, Unset):
            custom_settings = UNSET
        elif isinstance(self.custom_settings, CustomPipelineSettings):
            custom_settings = self.custom_settings.to_dict()
        else:
            custom_settings = self.custom_settings

        file_mapping_rules: Union[List[Dict[str, Any]], None, Unset]
        if isinstance(self.file_mapping_rules, Unset):
            file_mapping_rules = UNSET
        elif isinstance(self.file_mapping_rules, list):
            file_mapping_rules = []
            for file_mapping_rules_type_0_item_data in self.file_mapping_rules:
                file_mapping_rules_type_0_item = file_mapping_rules_type_0_item_data.to_dict()
                file_mapping_rules.append(file_mapping_rules_type_0_item)

        else:
            file_mapping_rules = self.file_mapping_rules

        # Datetimes serialize to ISO-8601 strings.
        created_at: Union[Unset, str] = UNSET
        if not isinstance(self.created_at, Unset):
            created_at = self.created_at.isoformat()

        updated_at: Union[Unset, str] = UNSET
        if not isinstance(self.updated_at, Unset):
            updated_at = self.updated_at.isoformat()

        field_dict: Dict[str, Any] = {}
        # additional_properties first, so declared fields win on collisions.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
                "description": description,
                "dataType": data_type,
                "executor": executor,
                "childProcessIds": child_process_ids,
                "parentProcessIds": parent_process_ids,
                "linkedProjectIds": linked_project_ids,
                "isTenantWide": is_tenant_wide,
                "allowMultipleSources": allow_multiple_sources,
                "usesSampleSheet": uses_sample_sheet,
                "isArchived": is_archived,
            }
        )
        if category is not UNSET:
            field_dict["category"] = category
        if pipeline_type is not UNSET:
            field_dict["pipelineType"] = pipeline_type
        if documentation_url is not UNSET:
            field_dict["documentationUrl"] = documentation_url
        if file_requirements_message is not UNSET:
            field_dict["fileRequirementsMessage"] = file_requirements_message
        if pipeline_code is not UNSET:
            field_dict["pipelineCode"] = pipeline_code
        if owner is not UNSET:
            field_dict["owner"] = owner
        if custom_settings is not UNSET:
            field_dict["customSettings"] = custom_settings
        if file_mapping_rules is not UNSET:
            field_dict["fileMappingRules"] = file_mapping_rules
        if created_at is not UNSET:
            field_dict["createdAt"] = created_at
        if updated_at is not UNSET:
            field_dict["updatedAt"] = updated_at

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build a ProcessDetail from an API response dict.

        Union-typed fields use try/except fallback parsing: attempt the model
        type first, fall back to the raw value (the bare ``except`` is the
        generator's deliberate union-dispatch pattern).
        """
        from ..models.custom_pipeline_settings import CustomPipelineSettings
        from ..models.file_mapping_rule import FileMappingRule
        from ..models.pipeline_code import PipelineCode

        # Copy so the caller's dict is not mutated by the pops below.
        d = src_dict.copy()
        id = d.pop("id")

        name = d.pop("name")

        description = d.pop("description")

        data_type = d.pop("dataType")

        executor = Executor(d.pop("executor"))

        child_process_ids = cast(List[str], d.pop("childProcessIds"))

        parent_process_ids = cast(List[str], d.pop("parentProcessIds"))

        linked_project_ids = cast(List[str], d.pop("linkedProjectIds"))

        is_tenant_wide = d.pop("isTenantWide")

        allow_multiple_sources = d.pop("allowMultipleSources")

        uses_sample_sheet = d.pop("usesSampleSheet")

        is_archived = d.pop("isArchived")

        category = d.pop("category", UNSET)

        pipeline_type = d.pop("pipelineType", UNSET)

        documentation_url = d.pop("documentationUrl", UNSET)

        file_requirements_message = d.pop("fileRequirementsMessage", UNSET)

        def _parse_pipeline_code(data: object) -> Union["PipelineCode", None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                pipeline_code_type_1 = PipelineCode.from_dict(data)

                return pipeline_code_type_1
            except:  # noqa: E722
                pass
            return cast(Union["PipelineCode", None, Unset], data)

        pipeline_code = _parse_pipeline_code(d.pop("pipelineCode", UNSET))

        def _parse_owner(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        owner = _parse_owner(d.pop("owner", UNSET))

        def _parse_custom_settings(data: object) -> Union["CustomPipelineSettings", None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                custom_settings_type_1 = CustomPipelineSettings.from_dict(data)

                return custom_settings_type_1
            except:  # noqa: E722
                pass
            return cast(Union["CustomPipelineSettings", None, Unset], data)

        custom_settings = _parse_custom_settings(d.pop("customSettings", UNSET))

        def _parse_file_mapping_rules(data: object) -> Union[List["FileMappingRule"], None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                file_mapping_rules_type_0 = []
                _file_mapping_rules_type_0 = data
                for file_mapping_rules_type_0_item_data in _file_mapping_rules_type_0:
                    file_mapping_rules_type_0_item = FileMappingRule.from_dict(file_mapping_rules_type_0_item_data)

                    file_mapping_rules_type_0.append(file_mapping_rules_type_0_item)

                return file_mapping_rules_type_0
            except:  # noqa: E722
                pass
            return cast(Union[List["FileMappingRule"], None, Unset], data)

        file_mapping_rules = _parse_file_mapping_rules(d.pop("fileMappingRules", UNSET))

        _created_at = d.pop("createdAt", UNSET)
        created_at: Union[Unset, datetime.datetime]
        if isinstance(_created_at, Unset):
            created_at = UNSET
        else:
            created_at = isoparse(_created_at)

        _updated_at = d.pop("updatedAt", UNSET)
        updated_at: Union[Unset, datetime.datetime]
        if isinstance(_updated_at, Unset):
            updated_at = UNSET
        else:
            updated_at = isoparse(_updated_at)

        process_detail = cls(
            id=id,
            name=name,
            description=description,
            data_type=data_type,
            executor=executor,
            child_process_ids=child_process_ids,
            parent_process_ids=parent_process_ids,
            linked_project_ids=linked_project_ids,
            is_tenant_wide=is_tenant_wide,
            allow_multiple_sources=allow_multiple_sources,
            uses_sample_sheet=uses_sample_sheet,
            is_archived=is_archived,
            category=category,
            pipeline_type=pipeline_type,
            documentation_url=documentation_url,
            file_requirements_message=file_requirements_message,
            pipeline_code=pipeline_code,
            owner=owner,
            custom_settings=custom_settings,
            file_mapping_rules=file_mapping_rules,
            created_at=created_at,
            updated_at=updated_at,
        )

        # Keys left over after popping declared fields are preserved.
        process_detail.additional_properties = d
        return process_detail

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (undeclared) keys captured from the payload."""
        return list(self.additional_properties.keys())
Identifies a data type or pipeline in Cirro
Attributes:
- id (str): Unique ID of the Process Example: process-hutch-magic_flute-1_0.
- name (str): Friendly name for the process Example: MAGeCK Flute.
- description (str): Description of the process Example: MAGeCK Flute enables accurate identification of essential genes with their related biological functions.
- data_type (str): Name of the data type this pipeline produces (if it is not defined, use the name)
- executor (Executor): How the workflow is executed
- child_process_ids (List[str]): IDs of pipelines that can be run downstream
- parent_process_ids (List[str]): IDs of processes that can run this pipeline
- linked_project_ids (List[str]): Projects that can run this process
- is_tenant_wide (bool): Whether the process is shared with the tenant
- allow_multiple_sources (bool): Whether the pipeline is allowed to have multiple dataset sources
- uses_sample_sheet (bool): Whether the pipeline uses the Cirro-provided sample sheet
- is_archived (bool): Whether the process is marked as archived
- category (Union[Unset, str]): Category of the process Example: Microbial Analysis.
- pipeline_type (Union[Unset, str]): Type of pipeline Example: nf-core.
- documentation_url (Union[Unset, str]): Link to process documentation Example: https://docs.cirro.bio/pipelines/catalog_targeted_sequencing/#crispr-screen-analysis.
- file_requirements_message (Union[Unset, str]): Description of the files to be uploaded (optional)
- pipeline_code (Union['PipelineCode', None, Unset]):
- owner (Union[None, Unset, str]): Username of the pipeline creator (blank if Cirro curated)
- custom_settings (Union['CustomPipelineSettings', None, Unset]):
- file_mapping_rules (Union[List['FileMappingRule'], None, Unset]):
- created_at (Union[Unset, datetime.datetime]): When the process was created (does not reflect the pipeline code)
- updated_at (Union[Unset, datetime.datetime]): When the process was updated (does not reflect the pipeline code)
def __init__(self, id, name, description, data_type, executor, child_process_ids, parent_process_ids, linked_project_ids, is_tenant_wide, allow_multiple_sources, uses_sample_sheet, is_archived, category=attr_dict['category'].default, pipeline_type=attr_dict['pipeline_type'].default, documentation_url=attr_dict['documentation_url'].default, file_requirements_message=attr_dict['file_requirements_message'].default, pipeline_code=attr_dict['pipeline_code'].default, owner=attr_dict['owner'].default, custom_settings=attr_dict['custom_settings'].default, file_mapping_rules=attr_dict['file_mapping_rules'].default, created_at=attr_dict['created_at'].default, updated_at=attr_dict['updated_at'].default):
    """Method generated by attrs for class ProcessDetail."""
    # NOTE(review): attrs-synthesized __init__ as rendered by the documentation
    # tool; `attr_dict` / `__attr_factory_additional_properties` are attrs
    # internals — not callable from user code. Optional fields default to
    # their attrs field defaults (UNSET sentinels) — TODO confirm.
    self.id = id
    self.name = name
    self.description = description
    self.data_type = data_type
    self.executor = executor
    self.child_process_ids = child_process_ids
    self.parent_process_ids = parent_process_ids
    self.linked_project_ids = linked_project_ids
    self.is_tenant_wide = is_tenant_wide
    self.allow_multiple_sources = allow_multiple_sources
    self.uses_sample_sheet = uses_sample_sheet
    self.is_archived = is_archived
    self.category = category
    self.pipeline_type = pipeline_type
    self.documentation_url = documentation_url
    self.file_requirements_message = file_requirements_message
    self.pipeline_code = pipeline_code
    self.owner = owner
    self.custom_settings = custom_settings
    self.file_mapping_rules = file_mapping_rules
    self.created_at = created_at
    self.updated_at = updated_at
    # Holds extra payload keys not mapped to a declared field.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProcessDetail.
def to_dict(self) -> Dict[str, Any]:
    """Serialize this ProcessDetail to a JSON-ready dict (camelCase keys).

    Optional fields are skipped while UNSET; nested models serialize via
    their own ``to_dict``; ``additional_properties`` merges in first so
    declared fields win on key collisions.
    """
    # Local imports avoid circular imports between generated model modules.
    from ..models.custom_pipeline_settings import CustomPipelineSettings
    from ..models.pipeline_code import PipelineCode

    id = self.id

    name = self.name

    description = self.description

    data_type = self.data_type

    # Enum member serialized by its underlying value.
    executor = self.executor.value

    child_process_ids = self.child_process_ids

    parent_process_ids = self.parent_process_ids

    linked_project_ids = self.linked_project_ids

    is_tenant_wide = self.is_tenant_wide

    allow_multiple_sources = self.allow_multiple_sources

    uses_sample_sheet = self.uses_sample_sheet

    is_archived = self.is_archived

    category = self.category

    pipeline_type = self.pipeline_type

    documentation_url = self.documentation_url

    file_requirements_message = self.file_requirements_message

    pipeline_code: Union[Dict[str, Any], None, Unset]
    if isinstance(self.pipeline_code, Unset):
        pipeline_code = UNSET
    elif isinstance(self.pipeline_code, PipelineCode):
        pipeline_code = self.pipeline_code.to_dict()
    else:
        pipeline_code = self.pipeline_code

    owner: Union[None, Unset, str]
    if isinstance(self.owner, Unset):
        owner = UNSET
    else:
        owner = self.owner

    custom_settings: Union[Dict[str, Any], None, Unset]
    if isinstance(self.custom_settings, Unset):
        custom_settings = UNSET
    elif isinstance(self.custom_settings, CustomPipelineSettings):
        custom_settings = self.custom_settings.to_dict()
    else:
        custom_settings = self.custom_settings

    file_mapping_rules: Union[List[Dict[str, Any]], None, Unset]
    if isinstance(self.file_mapping_rules, Unset):
        file_mapping_rules = UNSET
    elif isinstance(self.file_mapping_rules, list):
        file_mapping_rules = []
        for file_mapping_rules_type_0_item_data in self.file_mapping_rules:
            file_mapping_rules_type_0_item = file_mapping_rules_type_0_item_data.to_dict()
            file_mapping_rules.append(file_mapping_rules_type_0_item)

    else:
        file_mapping_rules = self.file_mapping_rules

    # Datetimes serialize to ISO-8601 strings.
    created_at: Union[Unset, str] = UNSET
    if not isinstance(self.created_at, Unset):
        created_at = self.created_at.isoformat()

    updated_at: Union[Unset, str] = UNSET
    if not isinstance(self.updated_at, Unset):
        updated_at = self.updated_at.isoformat()

    field_dict: Dict[str, Any] = {}
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "id": id,
            "name": name,
            "description": description,
            "dataType": data_type,
            "executor": executor,
            "childProcessIds": child_process_ids,
            "parentProcessIds": parent_process_ids,
            "linkedProjectIds": linked_project_ids,
            "isTenantWide": is_tenant_wide,
            "allowMultipleSources": allow_multiple_sources,
            "usesSampleSheet": uses_sample_sheet,
            "isArchived": is_archived,
        }
    )
    if category is not UNSET:
        field_dict["category"] = category
    if pipeline_type is not UNSET:
        field_dict["pipelineType"] = pipeline_type
    if documentation_url is not UNSET:
        field_dict["documentationUrl"] = documentation_url
    if file_requirements_message is not UNSET:
        field_dict["fileRequirementsMessage"] = file_requirements_message
    if pipeline_code is not UNSET:
        field_dict["pipelineCode"] = pipeline_code
    if owner is not UNSET:
        field_dict["owner"] = owner
    if custom_settings is not UNSET:
        field_dict["customSettings"] = custom_settings
    if file_mapping_rules is not UNSET:
        field_dict["fileMappingRules"] = file_mapping_rules
    if created_at is not UNSET:
        field_dict["createdAt"] = created_at
    if updated_at is not UNSET:
        field_dict["updatedAt"] = updated_at

    return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Build a ProcessDetail from an API response dict.

    Required keys raise KeyError when missing; optional keys default to
    UNSET; leftover keys land in ``additional_properties``. The nested
    ``_parse_*`` helpers implement the generator's union-dispatch pattern
    (try the model type, bare-except fall back to the raw value).
    """
    from ..models.custom_pipeline_settings import CustomPipelineSettings
    from ..models.file_mapping_rule import FileMappingRule
    from ..models.pipeline_code import PipelineCode

    # Copy so the caller's dict is not mutated by the pops below.
    d = src_dict.copy()
    id = d.pop("id")

    name = d.pop("name")

    description = d.pop("description")

    data_type = d.pop("dataType")

    executor = Executor(d.pop("executor"))

    child_process_ids = cast(List[str], d.pop("childProcessIds"))

    parent_process_ids = cast(List[str], d.pop("parentProcessIds"))

    linked_project_ids = cast(List[str], d.pop("linkedProjectIds"))

    is_tenant_wide = d.pop("isTenantWide")

    allow_multiple_sources = d.pop("allowMultipleSources")

    uses_sample_sheet = d.pop("usesSampleSheet")

    is_archived = d.pop("isArchived")

    category = d.pop("category", UNSET)

    pipeline_type = d.pop("pipelineType", UNSET)

    documentation_url = d.pop("documentationUrl", UNSET)

    file_requirements_message = d.pop("fileRequirementsMessage", UNSET)

    def _parse_pipeline_code(data: object) -> Union["PipelineCode", None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, dict):
                raise TypeError()
            pipeline_code_type_1 = PipelineCode.from_dict(data)

            return pipeline_code_type_1
        except:  # noqa: E722
            pass
        return cast(Union["PipelineCode", None, Unset], data)

    pipeline_code = _parse_pipeline_code(d.pop("pipelineCode", UNSET))

    def _parse_owner(data: object) -> Union[None, Unset, str]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(Union[None, Unset, str], data)

    owner = _parse_owner(d.pop("owner", UNSET))

    def _parse_custom_settings(data: object) -> Union["CustomPipelineSettings", None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, dict):
                raise TypeError()
            custom_settings_type_1 = CustomPipelineSettings.from_dict(data)

            return custom_settings_type_1
        except:  # noqa: E722
            pass
        return cast(Union["CustomPipelineSettings", None, Unset], data)

    custom_settings = _parse_custom_settings(d.pop("customSettings", UNSET))

    def _parse_file_mapping_rules(data: object) -> Union[List["FileMappingRule"], None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, list):
                raise TypeError()
            file_mapping_rules_type_0 = []
            _file_mapping_rules_type_0 = data
            for file_mapping_rules_type_0_item_data in _file_mapping_rules_type_0:
                file_mapping_rules_type_0_item = FileMappingRule.from_dict(file_mapping_rules_type_0_item_data)

                file_mapping_rules_type_0.append(file_mapping_rules_type_0_item)

            return file_mapping_rules_type_0
        except:  # noqa: E722
            pass
        return cast(Union[List["FileMappingRule"], None, Unset], data)

    file_mapping_rules = _parse_file_mapping_rules(d.pop("fileMappingRules", UNSET))

    # ISO-8601 timestamps, parsed only when present.
    _created_at = d.pop("createdAt", UNSET)
    created_at: Union[Unset, datetime.datetime]
    if isinstance(_created_at, Unset):
        created_at = UNSET
    else:
        created_at = isoparse(_created_at)

    _updated_at = d.pop("updatedAt", UNSET)
    updated_at: Union[Unset, datetime.datetime]
    if isinstance(_updated_at, Unset):
        updated_at = UNSET
    else:
        updated_at = isoparse(_updated_at)

    process_detail = cls(
        id=id,
        name=name,
        description=description,
        data_type=data_type,
        executor=executor,
        child_process_ids=child_process_ids,
        parent_process_ids=parent_process_ids,
        linked_project_ids=linked_project_ids,
        is_tenant_wide=is_tenant_wide,
        allow_multiple_sources=allow_multiple_sources,
        uses_sample_sheet=uses_sample_sheet,
        is_archived=is_archived,
        category=category,
        pipeline_type=pipeline_type,
        documentation_url=documentation_url,
        file_requirements_message=file_requirements_message,
        pipeline_code=pipeline_code,
        owner=owner,
        custom_settings=custom_settings,
        file_mapping_rules=file_mapping_rules,
        created_at=created_at,
        updated_at=updated_at,
    )

    process_detail.additional_properties = d
    return process_detail
@_attrs_define
class Project:
    """A Cirro project record.

    Attributes:
        id (str):
        name (str):
        description (str):
        status (Status):
        tags (List['Tag']):
        organization (str):
        classification_ids (List[str]):
        billing_account_id (str):
    """

    id: str
    name: str
    description: str
    status: Status
    tags: List["Tag"]
    organization: str
    classification_ids: List[str]
    billing_account_id: str
    # Extra payload keys not mapped to a declared field (round-trip support).
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict using the API's camelCase keys.

        Extra captured keys are merged in first, so the declared fields take
        precedence on any collision.
        """
        serialized: Dict[str, Any] = {}
        serialized.update(self.additional_properties)
        serialized.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                # Enum member serializes as its underlying value.
                "status": self.status.value,
                # Nested Tag models serialize via their own to_dict.
                "tags": [tag.to_dict() for tag in self.tags],
                "organization": self.organization,
                "classificationIds": self.classification_ids,
                "billingAccountId": self.billing_account_id,
            }
        )

        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build a Project from an API response dict.

        Missing required keys raise KeyError; any unrecognized keys are kept
        in ``additional_properties``.
        """
        # Local import avoids a circular import between generated modules.
        from ..models.tag import Tag

        # Work on a copy so the caller's dict is left untouched.
        payload = src_dict.copy()

        project = cls(
            id=payload.pop("id"),
            name=payload.pop("name"),
            description=payload.pop("description"),
            status=Status(payload.pop("status")),
            tags=[Tag.from_dict(item) for item in payload.pop("tags")],
            organization=payload.pop("organization"),
            classification_ids=cast(List[str], payload.pop("classificationIds")),
            billing_account_id=payload.pop("billingAccountId"),
        )

        # Whatever remains are unrecognized keys; preserve them.
        project.additional_properties = payload
        return project

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (undeclared) keys captured from the payload."""
        return list(self.additional_properties.keys())
Attributes:
- id (str):
- name (str):
- description (str):
- status (Status):
- tags (List['Tag']):
- organization (str):
- classification_ids (List[str]):
- billing_account_id (str):
31def __init__(self, id, name, description, status, tags, organization, classification_ids, billing_account_id): 32 self.id = id 33 self.name = name 34 self.description = description 35 self.status = status 36 self.tags = tags 37 self.organization = organization 38 self.classification_ids = classification_ids 39 self.billing_account_id = billing_account_id 40 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Project.
40 def to_dict(self) -> Dict[str, Any]: 41 id = self.id 42 43 name = self.name 44 45 description = self.description 46 47 status = self.status.value 48 49 tags = [] 50 for tags_item_data in self.tags: 51 tags_item = tags_item_data.to_dict() 52 tags.append(tags_item) 53 54 organization = self.organization 55 56 classification_ids = self.classification_ids 57 58 billing_account_id = self.billing_account_id 59 60 field_dict: Dict[str, Any] = {} 61 field_dict.update(self.additional_properties) 62 field_dict.update( 63 { 64 "id": id, 65 "name": name, 66 "description": description, 67 "status": status, 68 "tags": tags, 69 "organization": organization, 70 "classificationIds": classification_ids, 71 "billingAccountId": billing_account_id, 72 } 73 ) 74 75 return field_dict
77 @classmethod 78 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 79 from ..models.tag import Tag 80 81 d = src_dict.copy() 82 id = d.pop("id") 83 84 name = d.pop("name") 85 86 description = d.pop("description") 87 88 status = Status(d.pop("status")) 89 90 tags = [] 91 _tags = d.pop("tags") 92 for tags_item_data in _tags: 93 tags_item = Tag.from_dict(tags_item_data) 94 95 tags.append(tags_item) 96 97 organization = d.pop("organization") 98 99 classification_ids = cast(List[str], d.pop("classificationIds")) 100 101 billing_account_id = d.pop("billingAccountId") 102 103 project = cls( 104 id=id, 105 name=name, 106 description=description, 107 status=status, 108 tags=tags, 109 organization=organization, 110 classification_ids=classification_ids, 111 billing_account_id=billing_account_id, 112 ) 113 114 project.additional_properties = d 115 return project
@_attrs_define
class ProjectAccessRequest:
    """A request by a user for a role on a project, with its review state.

    Attributes:
        id (str):
        username (str):
        project_id (str):
        role (ProjectRole):
        message (str):
        status (RequestStatus):
        reviewer_username (str):
        created_at (datetime.datetime):
        expiry (datetime.datetime):
    """

    id: str
    username: str
    project_id: str
    role: ProjectRole
    message: str
    status: RequestStatus
    reviewer_username: str
    created_at: datetime.datetime
    expiry: datetime.datetime
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize using camelCase wire keys; datetimes become ISO-8601 strings."""
        payload: Dict[str, Any] = {}
        payload.update(self.additional_properties)
        payload.update(
            {
                "id": self.id,
                "username": self.username,
                "projectId": self.project_id,
                "role": self.role.value,
                "message": self.message,
                "status": self.status.value,
                "reviewerUsername": self.reviewer_username,
                "createdAt": self.created_at.isoformat(),
                "expiry": self.expiry.isoformat(),
            }
        )
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize; unconsumed keys are kept in additional_properties."""
        data = src_dict.copy()
        request = cls(
            id=data.pop("id"),
            username=data.pop("username"),
            project_id=data.pop("projectId"),
            role=ProjectRole(data.pop("role")),
            message=data.pop("message"),
            status=RequestStatus(data.pop("status")),
            reviewer_username=data.pop("reviewerUsername"),
            created_at=isoparse(data.pop("createdAt")),
            expiry=isoparse(data.pop("expiry")),
        )
        request.additional_properties = data
        return request

    @property
    def additional_keys(self) -> List[str]:
        """Names of payload keys that are not modeled as declared fields."""
        return list(self.additional_properties.keys())
Attributes:
- id (str):
- username (str):
- project_id (str):
- role (ProjectRole):
- message (str):
- status (RequestStatus):
- reviewer_username (str):
- created_at (datetime.datetime):
- expiry (datetime.datetime):
32def __init__(self, id, username, project_id, role, message, status, reviewer_username, created_at, expiry): 33 self.id = id 34 self.username = username 35 self.project_id = project_id 36 self.role = role 37 self.message = message 38 self.status = status 39 self.reviewer_username = reviewer_username 40 self.created_at = created_at 41 self.expiry = expiry 42 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectAccessRequest.
41 def to_dict(self) -> Dict[str, Any]: 42 id = self.id 43 44 username = self.username 45 46 project_id = self.project_id 47 48 role = self.role.value 49 50 message = self.message 51 52 status = self.status.value 53 54 reviewer_username = self.reviewer_username 55 56 created_at = self.created_at.isoformat() 57 58 expiry = self.expiry.isoformat() 59 60 field_dict: Dict[str, Any] = {} 61 field_dict.update(self.additional_properties) 62 field_dict.update( 63 { 64 "id": id, 65 "username": username, 66 "projectId": project_id, 67 "role": role, 68 "message": message, 69 "status": status, 70 "reviewerUsername": reviewer_username, 71 "createdAt": created_at, 72 "expiry": expiry, 73 } 74 ) 75 76 return field_dict
78 @classmethod 79 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 80 d = src_dict.copy() 81 id = d.pop("id") 82 83 username = d.pop("username") 84 85 project_id = d.pop("projectId") 86 87 role = ProjectRole(d.pop("role")) 88 89 message = d.pop("message") 90 91 status = RequestStatus(d.pop("status")) 92 93 reviewer_username = d.pop("reviewerUsername") 94 95 created_at = isoparse(d.pop("createdAt")) 96 97 expiry = isoparse(d.pop("expiry")) 98 99 project_access_request = cls( 100 id=id, 101 username=username, 102 project_id=project_id, 103 role=role, 104 message=message, 105 status=status, 106 reviewer_username=reviewer_username, 107 created_at=created_at, 108 expiry=expiry, 109 ) 110 111 project_access_request.additional_properties = d 112 return project_access_request
5class ProjectAccessType(str, Enum): 6 DATASET_UPLOAD = "DATASET_UPLOAD" 7 PROJECT_DOWNLOAD = "PROJECT_DOWNLOAD" 8 REFERENCE_UPLOAD = "REFERENCE_UPLOAD" 9 SAMPLESHEET_UPLOAD = "SAMPLESHEET_UPLOAD" 10 SHARED_DATASET_DOWNLOAD = "SHARED_DATASET_DOWNLOAD" 11 UNKNOWN = "UNKNOWN" 12 """ This is a fallback value for when the value is not known, do not use this value when making requests """ 13 14 def __str__(self) -> str: 15 return str(self.value) 16 17 @classmethod 18 def _missing_(cls, number): 19 return cls(cls.UNKNOWN)
str(object='') -> str; str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class ProjectCreateOptions:
    """Options presented to a user when creating a project.

    Attributes:
        enabled_account_types (List[CloudAccountType]):
        portal_account_id (str):
        portal_region (str):
        template_url (str):
        wizard_url (str):
    """

    enabled_account_types: List[CloudAccountType]
    portal_account_id: str
    portal_region: str
    template_url: str
    wizard_url: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize with camelCase keys; enum members become their string values."""
        payload: Dict[str, Any] = {}
        payload.update(self.additional_properties)
        payload.update(
            {
                "enabledAccountTypes": [member.value for member in self.enabled_account_types],
                "portalAccountId": self.portal_account_id,
                "portalRegion": self.portal_region,
                "templateUrl": self.template_url,
                "wizardUrl": self.wizard_url,
            }
        )
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize; unconsumed keys are kept in additional_properties."""
        data = src_dict.copy()
        options = cls(
            enabled_account_types=[CloudAccountType(raw) for raw in data.pop("enabledAccountTypes")],
            portal_account_id=data.pop("portalAccountId"),
            portal_region=data.pop("portalRegion"),
            template_url=data.pop("templateUrl"),
            wizard_url=data.pop("wizardUrl"),
        )
        options.additional_properties = data
        return options

    @property
    def additional_keys(self) -> List[str]:
        """Names of payload keys that are not modeled as declared fields."""
        return list(self.additional_properties.keys())
Attributes:
- enabled_account_types (List[CloudAccountType]):
- portal_account_id (str):
- portal_region (str):
- template_url (str):
- wizard_url (str):
28def __init__(self, enabled_account_types, portal_account_id, portal_region, template_url, wizard_url): 29 self.enabled_account_types = enabled_account_types 30 self.portal_account_id = portal_account_id 31 self.portal_region = portal_region 32 self.template_url = template_url 33 self.wizard_url = wizard_url 34 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectCreateOptions.
30 def to_dict(self) -> Dict[str, Any]: 31 enabled_account_types = [] 32 for enabled_account_types_item_data in self.enabled_account_types: 33 enabled_account_types_item = enabled_account_types_item_data.value 34 enabled_account_types.append(enabled_account_types_item) 35 36 portal_account_id = self.portal_account_id 37 38 portal_region = self.portal_region 39 40 template_url = self.template_url 41 42 wizard_url = self.wizard_url 43 44 field_dict: Dict[str, Any] = {} 45 field_dict.update(self.additional_properties) 46 field_dict.update( 47 { 48 "enabledAccountTypes": enabled_account_types, 49 "portalAccountId": portal_account_id, 50 "portalRegion": portal_region, 51 "templateUrl": template_url, 52 "wizardUrl": wizard_url, 53 } 54 ) 55 56 return field_dict
58 @classmethod 59 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 60 d = src_dict.copy() 61 enabled_account_types = [] 62 _enabled_account_types = d.pop("enabledAccountTypes") 63 for enabled_account_types_item_data in _enabled_account_types: 64 enabled_account_types_item = CloudAccountType(enabled_account_types_item_data) 65 66 enabled_account_types.append(enabled_account_types_item) 67 68 portal_account_id = d.pop("portalAccountId") 69 70 portal_region = d.pop("portalRegion") 71 72 template_url = d.pop("templateUrl") 73 74 wizard_url = d.pop("wizardUrl") 75 76 project_create_options = cls( 77 enabled_account_types=enabled_account_types, 78 portal_account_id=portal_account_id, 79 portal_region=portal_region, 80 template_url=template_url, 81 wizard_url=wizard_url, 82 ) 83 84 project_create_options.additional_properties = d 85 return project_create_options
@_attrs_define
class ProjectDetail:
    """Full project record, including settings, cloud account, and audit fields.

    Attributes:
        id (str):
        name (str):
        description (str):
        billing_account_id (str):
        contacts (List['Contact']):
        organization (str):
        status (Status):
        settings (ProjectSettings):
        account (CloudAccount):
        status_message (str):
        tags (List['Tag']):
        classification_ids (List[str]):
        created_by (str):
        created_at (datetime.datetime):
        updated_at (datetime.datetime):
    """

    id: str
    name: str
    description: str
    billing_account_id: str
    contacts: List["Contact"]
    organization: str
    status: Status
    settings: "ProjectSettings"
    account: "CloudAccount"
    status_message: str
    tags: List["Tag"]
    classification_ids: List[str]
    created_by: str
    created_at: datetime.datetime
    updated_at: datetime.datetime
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize with camelCase keys; nested models recurse via their to_dict."""
        payload: Dict[str, Any] = {}
        # Extra keys first so declared fields win on any collision.
        payload.update(self.additional_properties)
        payload.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "billingAccountId": self.billing_account_id,
                "contacts": [contact.to_dict() for contact in self.contacts],
                "organization": self.organization,
                "status": self.status.value,
                "settings": self.settings.to_dict(),
                "account": self.account.to_dict(),
                "statusMessage": self.status_message,
                "tags": [tag.to_dict() for tag in self.tags],
                "classificationIds": self.classification_ids,
                "createdBy": self.created_by,
                "createdAt": self.created_at.isoformat(),
                "updatedAt": self.updated_at.isoformat(),
            }
        )
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize; unconsumed keys land in additional_properties."""
        from ..models.cloud_account import CloudAccount
        from ..models.contact import Contact
        from ..models.project_settings import ProjectSettings
        from ..models.tag import Tag

        data = src_dict.copy()
        detail = cls(
            id=data.pop("id"),
            name=data.pop("name"),
            description=data.pop("description"),
            billing_account_id=data.pop("billingAccountId"),
            contacts=[Contact.from_dict(item) for item in data.pop("contacts")],
            organization=data.pop("organization"),
            status=Status(data.pop("status")),
            settings=ProjectSettings.from_dict(data.pop("settings")),
            account=CloudAccount.from_dict(data.pop("account")),
            status_message=data.pop("statusMessage"),
            tags=[Tag.from_dict(item) for item in data.pop("tags")],
            classification_ids=cast(List[str], data.pop("classificationIds")),
            created_by=data.pop("createdBy"),
            created_at=isoparse(data.pop("createdAt")),
            updated_at=isoparse(data.pop("updatedAt")),
        )
        detail.additional_properties = data
        return detail

    @property
    def additional_keys(self) -> List[str]:
        """Names of payload keys that are not modeled as declared fields."""
        return list(self.additional_properties.keys())
Attributes:
- id (str):
- name (str):
- description (str):
- billing_account_id (str):
- contacts (List['Contact']):
- organization (str):
- status (Status):
- settings (ProjectSettings):
- account (CloudAccount):
- status_message (str):
- tags (List['Tag']):
- classification_ids (List[str]):
- created_by (str):
- created_at (datetime.datetime):
- updated_at (datetime.datetime):
38def __init__(self, id, name, description, billing_account_id, contacts, organization, status, settings, account, status_message, tags, classification_ids, created_by, created_at, updated_at): 39 self.id = id 40 self.name = name 41 self.description = description 42 self.billing_account_id = billing_account_id 43 self.contacts = contacts 44 self.organization = organization 45 self.status = status 46 self.settings = settings 47 self.account = account 48 self.status_message = status_message 49 self.tags = tags 50 self.classification_ids = classification_ids 51 self.created_by = created_by 52 self.created_at = created_at 53 self.updated_at = updated_at 54 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectDetail.
59 def to_dict(self) -> Dict[str, Any]: 60 id = self.id 61 62 name = self.name 63 64 description = self.description 65 66 billing_account_id = self.billing_account_id 67 68 contacts = [] 69 for contacts_item_data in self.contacts: 70 contacts_item = contacts_item_data.to_dict() 71 contacts.append(contacts_item) 72 73 organization = self.organization 74 75 status = self.status.value 76 77 settings = self.settings.to_dict() 78 79 account = self.account.to_dict() 80 81 status_message = self.status_message 82 83 tags = [] 84 for tags_item_data in self.tags: 85 tags_item = tags_item_data.to_dict() 86 tags.append(tags_item) 87 88 classification_ids = self.classification_ids 89 90 created_by = self.created_by 91 92 created_at = self.created_at.isoformat() 93 94 updated_at = self.updated_at.isoformat() 95 96 field_dict: Dict[str, Any] = {} 97 field_dict.update(self.additional_properties) 98 field_dict.update( 99 { 100 "id": id, 101 "name": name, 102 "description": description, 103 "billingAccountId": billing_account_id, 104 "contacts": contacts, 105 "organization": organization, 106 "status": status, 107 "settings": settings, 108 "account": account, 109 "statusMessage": status_message, 110 "tags": tags, 111 "classificationIds": classification_ids, 112 "createdBy": created_by, 113 "createdAt": created_at, 114 "updatedAt": updated_at, 115 } 116 ) 117 118 return field_dict
120 @classmethod 121 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 122 from ..models.cloud_account import CloudAccount 123 from ..models.contact import Contact 124 from ..models.project_settings import ProjectSettings 125 from ..models.tag import Tag 126 127 d = src_dict.copy() 128 id = d.pop("id") 129 130 name = d.pop("name") 131 132 description = d.pop("description") 133 134 billing_account_id = d.pop("billingAccountId") 135 136 contacts = [] 137 _contacts = d.pop("contacts") 138 for contacts_item_data in _contacts: 139 contacts_item = Contact.from_dict(contacts_item_data) 140 141 contacts.append(contacts_item) 142 143 organization = d.pop("organization") 144 145 status = Status(d.pop("status")) 146 147 settings = ProjectSettings.from_dict(d.pop("settings")) 148 149 account = CloudAccount.from_dict(d.pop("account")) 150 151 status_message = d.pop("statusMessage") 152 153 tags = [] 154 _tags = d.pop("tags") 155 for tags_item_data in _tags: 156 tags_item = Tag.from_dict(tags_item_data) 157 158 tags.append(tags_item) 159 160 classification_ids = cast(List[str], d.pop("classificationIds")) 161 162 created_by = d.pop("createdBy") 163 164 created_at = isoparse(d.pop("createdAt")) 165 166 updated_at = isoparse(d.pop("updatedAt")) 167 168 project_detail = cls( 169 id=id, 170 name=name, 171 description=description, 172 billing_account_id=billing_account_id, 173 contacts=contacts, 174 organization=organization, 175 status=status, 176 settings=settings, 177 account=account, 178 status_message=status_message, 179 tags=tags, 180 classification_ids=classification_ids, 181 created_by=created_by, 182 created_at=created_at, 183 updated_at=updated_at, 184 ) 185 186 project_detail.additional_properties = d 187 return project_detail
@_attrs_define
class ProjectFileAccessRequest:
    """Request body for obtaining scoped file access on a project.

    Attributes:
        access_type (ProjectAccessType):
        dataset_id (Union[None, Unset, str]):
        token_lifetime_hours (Union[None, Unset, int]):
    """

    access_type: ProjectAccessType
    dataset_id: Union[None, Unset, str] = UNSET
    token_lifetime_hours: Union[None, Unset, int] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize; optional fields are omitted when unset (None IS emitted)."""
        payload: Dict[str, Any] = {}
        payload.update(self.additional_properties)
        payload.update(
            {
                "accessType": self.access_type.value,
            }
        )
        if not isinstance(self.dataset_id, Unset):
            payload["datasetId"] = self.dataset_id
        if not isinstance(self.token_lifetime_hours, Unset):
            payload["tokenLifetimeHours"] = self.token_lifetime_hours
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize; absent optional keys become UNSET.

        The optional fields pass through unchanged — None, UNSET, and scalar
        values are accepted as-is (the generated ``_parse_*`` helpers returned
        their argument on every branch; ``cast`` is a runtime no-op).
        """
        data = src_dict.copy()
        request = cls(
            access_type=ProjectAccessType(data.pop("accessType")),
            dataset_id=cast(Union[None, Unset, str], data.pop("datasetId", UNSET)),
            token_lifetime_hours=cast(Union[None, Unset, int], data.pop("tokenLifetimeHours", UNSET)),
        )
        request.additional_properties = data
        return request

    @property
    def additional_keys(self) -> List[str]:
        """Names of payload keys that are not modeled as declared fields."""
        return list(self.additional_properties.keys())
Attributes:
- access_type (ProjectAccessType):
- dataset_id (Union[None, Unset, str]):
- token_lifetime_hours (Union[None, Unset, int]):
26def __init__(self, access_type, dataset_id=attr_dict['dataset_id'].default, token_lifetime_hours=attr_dict['token_lifetime_hours'].default): 27 self.access_type = access_type 28 self.dataset_id = dataset_id 29 self.token_lifetime_hours = token_lifetime_hours 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectFileAccessRequest.
27 def to_dict(self) -> Dict[str, Any]: 28 access_type = self.access_type.value 29 30 dataset_id: Union[None, Unset, str] 31 if isinstance(self.dataset_id, Unset): 32 dataset_id = UNSET 33 else: 34 dataset_id = self.dataset_id 35 36 token_lifetime_hours: Union[None, Unset, int] 37 if isinstance(self.token_lifetime_hours, Unset): 38 token_lifetime_hours = UNSET 39 else: 40 token_lifetime_hours = self.token_lifetime_hours 41 42 field_dict: Dict[str, Any] = {} 43 field_dict.update(self.additional_properties) 44 field_dict.update( 45 { 46 "accessType": access_type, 47 } 48 ) 49 if dataset_id is not UNSET: 50 field_dict["datasetId"] = dataset_id 51 if token_lifetime_hours is not UNSET: 52 field_dict["tokenLifetimeHours"] = token_lifetime_hours 53 54 return field_dict
56 @classmethod 57 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 58 d = src_dict.copy() 59 access_type = ProjectAccessType(d.pop("accessType")) 60 61 def _parse_dataset_id(data: object) -> Union[None, Unset, str]: 62 if data is None: 63 return data 64 if isinstance(data, Unset): 65 return data 66 return cast(Union[None, Unset, str], data) 67 68 dataset_id = _parse_dataset_id(d.pop("datasetId", UNSET)) 69 70 def _parse_token_lifetime_hours(data: object) -> Union[None, Unset, int]: 71 if data is None: 72 return data 73 if isinstance(data, Unset): 74 return data 75 return cast(Union[None, Unset, int], data) 76 77 token_lifetime_hours = _parse_token_lifetime_hours(d.pop("tokenLifetimeHours", UNSET)) 78 79 project_file_access_request = cls( 80 access_type=access_type, 81 dataset_id=dataset_id, 82 token_lifetime_hours=token_lifetime_hours, 83 ) 84 85 project_file_access_request.additional_properties = d 86 return project_file_access_request
@_attrs_define
class ProjectInput:
    """Payload for creating or updating a project.

    Attributes:
        name (str):
        description (str):
        billing_account_id (str):
        settings (ProjectSettings):
        contacts (List['Contact']):
        account (Union['CloudAccount', None, Unset]):
        classification_ids (Union[List[str], None, Unset]):
        tags (Union[List['Tag'], None, Unset]):
    """

    name: str
    description: str
    billing_account_id: str
    settings: "ProjectSettings"
    contacts: List["Contact"]
    account: Union["CloudAccount", None, Unset] = UNSET
    classification_ids: Union[List[str], None, Unset] = UNSET
    tags: Union[List["Tag"], None, Unset] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize; optional fields are omitted when unset (None IS emitted)."""
        from ..models.cloud_account import CloudAccount

        account: Union[Dict[str, Any], None, Unset]
        if isinstance(self.account, Unset):
            account = UNSET
        elif isinstance(self.account, CloudAccount):
            account = self.account.to_dict()
        else:
            account = self.account  # None (or a raw passthrough value)

        classification_ids: Union[List[str], None, Unset]
        if isinstance(self.classification_ids, Unset):
            classification_ids = UNSET
        else:
            # Lists and None both pass through unchanged.
            classification_ids = self.classification_ids

        tags: Union[List[Dict[str, Any]], None, Unset]
        if isinstance(self.tags, Unset):
            tags = UNSET
        elif isinstance(self.tags, list):
            tags = [tag.to_dict() for tag in self.tags]
        else:
            tags = self.tags

        payload: Dict[str, Any] = {}
        payload.update(self.additional_properties)
        payload.update(
            {
                "name": self.name,
                "description": self.description,
                "billingAccountId": self.billing_account_id,
                "settings": self.settings.to_dict(),
                "contacts": [contact.to_dict() for contact in self.contacts],
            }
        )
        if account is not UNSET:
            payload["account"] = account
        if classification_ids is not UNSET:
            payload["classificationIds"] = classification_ids
        if tags is not UNSET:
            payload["tags"] = tags

        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize; unconsumed keys land in additional_properties.

        Optional fields are parsed best-effort: a dict becomes a CloudAccount,
        a list of dicts becomes Tags, and any value that fails to parse is
        passed through unchanged rather than raising.
        """
        from ..models.cloud_account import CloudAccount
        from ..models.contact import Contact
        from ..models.project_settings import ProjectSettings
        from ..models.tag import Tag

        data = src_dict.copy()

        def _read_account(raw: object) -> Union["CloudAccount", None, Unset]:
            # None/UNSET pass through; dicts are parsed; anything else — or a
            # failed parse — falls back to the raw value.
            if raw is None or isinstance(raw, Unset):
                return raw
            try:
                if not isinstance(raw, dict):
                    raise TypeError()
                return CloudAccount.from_dict(raw)
            except:  # noqa: E722
                pass
            return cast(Union["CloudAccount", None, Unset], raw)

        def _read_classification_ids(raw: object) -> Union[List[str], None, Unset]:
            # Every branch returns the value unchanged; cast is a runtime no-op.
            if raw is None or isinstance(raw, Unset):
                return raw
            if isinstance(raw, list):
                return cast(List[str], raw)
            return cast(Union[List[str], None, Unset], raw)

        def _read_tags(raw: object) -> Union[List["Tag"], None, Unset]:
            if raw is None or isinstance(raw, Unset):
                return raw
            try:
                if not isinstance(raw, list):
                    raise TypeError()
                return [Tag.from_dict(item) for item in raw]
            except:  # noqa: E722
                pass
            return cast(Union[List["Tag"], None, Unset], raw)

        project_input = cls(
            name=data.pop("name"),
            description=data.pop("description"),
            billing_account_id=data.pop("billingAccountId"),
            settings=ProjectSettings.from_dict(data.pop("settings")),
            contacts=[Contact.from_dict(item) for item in data.pop("contacts")],
            account=_read_account(data.pop("account", UNSET)),
            classification_ids=_read_classification_ids(data.pop("classificationIds", UNSET)),
            tags=_read_tags(data.pop("tags", UNSET)),
        )
        project_input.additional_properties = data
        return project_input

    @property
    def additional_keys(self) -> List[str]:
        """Names of payload keys that are not modeled as declared fields."""
        return list(self.additional_properties.keys())
Attributes:
- name (str):
- description (str):
- billing_account_id (str):
- settings (ProjectSettings):
- contacts (List['Contact']):
- account (Union['CloudAccount', None, Unset]):
- classification_ids (Union[List[str], None, Unset]):
- tags (Union[List['Tag'], None, Unset]):
31def __init__(self, name, description, billing_account_id, settings, contacts, account=attr_dict['account'].default, classification_ids=attr_dict['classification_ids'].default, tags=attr_dict['tags'].default): 32 self.name = name 33 self.description = description 34 self.billing_account_id = billing_account_id 35 self.settings = settings 36 self.contacts = contacts 37 self.account = account 38 self.classification_ids = classification_ids 39 self.tags = tags 40 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectInput.
43 def to_dict(self) -> Dict[str, Any]: 44 from ..models.cloud_account import CloudAccount 45 46 name = self.name 47 48 description = self.description 49 50 billing_account_id = self.billing_account_id 51 52 settings = self.settings.to_dict() 53 54 contacts = [] 55 for contacts_item_data in self.contacts: 56 contacts_item = contacts_item_data.to_dict() 57 contacts.append(contacts_item) 58 59 account: Union[Dict[str, Any], None, Unset] 60 if isinstance(self.account, Unset): 61 account = UNSET 62 elif isinstance(self.account, CloudAccount): 63 account = self.account.to_dict() 64 else: 65 account = self.account 66 67 classification_ids: Union[List[str], None, Unset] 68 if isinstance(self.classification_ids, Unset): 69 classification_ids = UNSET 70 elif isinstance(self.classification_ids, list): 71 classification_ids = self.classification_ids 72 73 else: 74 classification_ids = self.classification_ids 75 76 tags: Union[List[Dict[str, Any]], None, Unset] 77 if isinstance(self.tags, Unset): 78 tags = UNSET 79 elif isinstance(self.tags, list): 80 tags = [] 81 for tags_type_0_item_data in self.tags: 82 tags_type_0_item = tags_type_0_item_data.to_dict() 83 tags.append(tags_type_0_item) 84 85 else: 86 tags = self.tags 87 88 field_dict: Dict[str, Any] = {} 89 field_dict.update(self.additional_properties) 90 field_dict.update( 91 { 92 "name": name, 93 "description": description, 94 "billingAccountId": billing_account_id, 95 "settings": settings, 96 "contacts": contacts, 97 } 98 ) 99 if account is not UNSET: 100 field_dict["account"] = account 101 if classification_ids is not UNSET: 102 field_dict["classificationIds"] = classification_ids 103 if tags is not UNSET: 104 field_dict["tags"] = tags 105 106 return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize a ProjectInput from its JSON dict representation.

    Required keys are popped from a copy of ``src_dict``; each optional
    union field is handled by a best-effort ``_parse_*`` helper. Whatever
    keys remain after all known fields are consumed become
    ``additional_properties``.
    """
    from ..models.cloud_account import CloudAccount
    from ..models.contact import Contact
    from ..models.project_settings import ProjectSettings
    from ..models.tag import Tag

    d = src_dict.copy()
    name = d.pop("name")

    description = d.pop("description")

    billing_account_id = d.pop("billingAccountId")

    settings = ProjectSettings.from_dict(d.pop("settings"))

    contacts = []
    _contacts = d.pop("contacts")
    for contacts_item_data in _contacts:
        contacts_item = Contact.from_dict(contacts_item_data)

        contacts.append(contacts_item)

    def _parse_account(data: object) -> Union["CloudAccount", None, Unset]:
        # None/Unset pass through; a dict is parsed as CloudAccount; on any
        # parse failure the raw value is returned (generated best-effort style).
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, dict):
                raise TypeError()
            account_type_1 = CloudAccount.from_dict(data)

            return account_type_1
        except:  # noqa: E722
            pass
        return cast(Union["CloudAccount", None, Unset], data)

    account = _parse_account(d.pop("account", UNSET))

    def _parse_classification_ids(data: object) -> Union[List[str], None, Unset]:
        # None/Unset pass through; a list is taken as-is (items assumed str).
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, list):
                raise TypeError()
            classification_ids_type_0 = cast(List[str], data)

            return classification_ids_type_0
        except:  # noqa: E722
            pass
        return cast(Union[List[str], None, Unset], data)

    classification_ids = _parse_classification_ids(d.pop("classificationIds", UNSET))

    def _parse_tags(data: object) -> Union[List["Tag"], None, Unset]:
        # None/Unset pass through; list items are parsed as Tag models.
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, list):
                raise TypeError()
            tags_type_0 = []
            _tags_type_0 = data
            for tags_type_0_item_data in _tags_type_0:
                tags_type_0_item = Tag.from_dict(tags_type_0_item_data)

                tags_type_0.append(tags_type_0_item)

            return tags_type_0
        except:  # noqa: E722
            pass
        return cast(Union[List["Tag"], None, Unset], data)

    tags = _parse_tags(d.pop("tags", UNSET))

    project_input = cls(
        name=name,
        description=description,
        billing_account_id=billing_account_id,
        settings=settings,
        contacts=contacts,
        account=account,
        classification_ids=classification_ids,
        tags=tags,
    )

    # Remaining keys were not declared on the model; keep them round-trippable.
    project_input.additional_properties = d
    return project_input
@_attrs_define
class ProjectMetrics:
    """Cost and storage usage metrics reported for a single project.

    Attributes:
        project_id (str):
        costs (Union[Unset, List['MetricRecord']]): Costs by service by month. Example: [{
            "date": "2022-11-01", "unit": "$", "service": {"Other": 26.47,
            "EC2 - Other": 3.66, "Amazon Elastic Compute Cloud - Compute": 140.59,
            "Amazon Simple Storage Service": 24.91, "AmazonCloudWatch": 2.09}}]
        storage_metrics (Union[Unset, List['MetricRecord']]): Storage usage by tier by day. Example: [{
            "date": "2023-12-12", "unit": "GB", "service": {"IntelligentTieringAIAStorage": 4198.95,
            "IntelligentTieringFAStorage": 1516.48, "StandardStorage": 1.9,
            "IntelligentTieringIAStorage": 2154.6}}]
    """

    project_id: str
    costs: Union[Unset, List["MetricRecord"]] = UNSET
    storage_metrics: Union[Unset, List["MetricRecord"]] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict; unset metric lists are omitted."""
        serialized_costs: Union[Unset, List[Dict[str, Any]]] = UNSET
        if not isinstance(self.costs, Unset):
            serialized_costs = [record.to_dict() for record in self.costs]

        serialized_storage: Union[Unset, List[Dict[str, Any]]] = UNSET
        if not isinstance(self.storage_metrics, Unset):
            serialized_storage = [record.to_dict() for record in self.storage_metrics]

        # Extra properties go in first so the declared field wins on collisions.
        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update({"projectId": self.project_id})
        if serialized_costs is not UNSET:
            field_dict["costs"] = serialized_costs
        if serialized_storage is not UNSET:
            field_dict["storageMetrics"] = serialized_storage

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build a ProjectMetrics from its JSON dict; leftover keys become additional_properties."""
        from ..models.metric_record import MetricRecord

        payload = src_dict.copy()
        project_id = payload.pop("projectId")

        # A missing/unset or None list deserializes to an empty list.
        raw_costs = payload.pop("costs", UNSET)
        costs = [MetricRecord.from_dict(item) for item in (raw_costs or [])]

        raw_storage = payload.pop("storageMetrics", UNSET)
        storage_metrics = [MetricRecord.from_dict(item) for item in (raw_storage or [])]

        project_metrics = cls(
            project_id=project_id,
            costs=costs,
            storage_metrics=storage_metrics,
        )

        project_metrics.additional_properties = payload
        return project_metrics

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra keys captured in ``additional_properties``."""
        return list(self.additional_properties.keys())
Attributes:
- project_id (str):
- costs (Union[Unset, List['MetricRecord']]): Costs by service by month Example: [{ "date": "2022-11-01", "unit": "$", "service": { "Other": 26.47, "EC2 - Other": 3.66, "Amazon Elastic Compute Cloud - Compute": 140.59, "Amazon Simple Storage Service": 24.91, "AmazonCloudWatch": 2.09 } }] .
- storage_metrics (Union[Unset, List['MetricRecord']]): Storage usage by tier by day Example: [{ "date": "2023-12-12", "unit": "GB", "service": { "IntelligentTieringAIAStorage": 4198.95, "IntelligentTieringFAStorage": 1516.48, "StandardStorage": 1.9, "IntelligentTieringIAStorage": 2154.6 } }] .
26def __init__(self, project_id, costs=attr_dict['costs'].default, storage_metrics=attr_dict['storage_metrics'].default): 27 self.project_id = project_id 28 self.costs = costs 29 self.storage_metrics = storage_metrics 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectMetrics.
51 def to_dict(self) -> Dict[str, Any]: 52 project_id = self.project_id 53 54 costs: Union[Unset, List[Dict[str, Any]]] = UNSET 55 if not isinstance(self.costs, Unset): 56 costs = [] 57 for costs_item_data in self.costs: 58 costs_item = costs_item_data.to_dict() 59 costs.append(costs_item) 60 61 storage_metrics: Union[Unset, List[Dict[str, Any]]] = UNSET 62 if not isinstance(self.storage_metrics, Unset): 63 storage_metrics = [] 64 for storage_metrics_item_data in self.storage_metrics: 65 storage_metrics_item = storage_metrics_item_data.to_dict() 66 storage_metrics.append(storage_metrics_item) 67 68 field_dict: Dict[str, Any] = {} 69 field_dict.update(self.additional_properties) 70 field_dict.update( 71 { 72 "projectId": project_id, 73 } 74 ) 75 if costs is not UNSET: 76 field_dict["costs"] = costs 77 if storage_metrics is not UNSET: 78 field_dict["storageMetrics"] = storage_metrics 79 80 return field_dict
82 @classmethod 83 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 84 from ..models.metric_record import MetricRecord 85 86 d = src_dict.copy() 87 project_id = d.pop("projectId") 88 89 costs = [] 90 _costs = d.pop("costs", UNSET) 91 for costs_item_data in _costs or []: 92 costs_item = MetricRecord.from_dict(costs_item_data) 93 94 costs.append(costs_item) 95 96 storage_metrics = [] 97 _storage_metrics = d.pop("storageMetrics", UNSET) 98 for storage_metrics_item_data in _storage_metrics or []: 99 storage_metrics_item = MetricRecord.from_dict(storage_metrics_item_data) 100 101 storage_metrics.append(storage_metrics_item) 102 103 project_metrics = cls( 104 project_id=project_id, 105 costs=costs, 106 storage_metrics=storage_metrics, 107 ) 108 109 project_metrics.additional_properties = d 110 return project_metrics
@_attrs_define
class ProjectRequest:
    """Request payload describing a project.

    Attributes:
        name (str):
        description (str):
        classification_ids (List[str]):
        billing_info (str):
        admin_username (str):
        message (str):
    """

    name: str
    description: str
    classification_ids: List[str]
    billing_info: str
    admin_username: str
    message: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict with camelCase keys."""
        # Extra properties first, so declared fields win on key collisions.
        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "name": self.name,
                "description": self.description,
                "classificationIds": self.classification_ids,
                "billingInfo": self.billing_info,
                "adminUsername": self.admin_username,
                "message": self.message,
            }
        )

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build a ProjectRequest from its JSON dict; leftover keys become additional_properties."""
        data = src_dict.copy()

        project_request = cls(
            name=data.pop("name"),
            description=data.pop("description"),
            classification_ids=cast(List[str], data.pop("classificationIds")),
            billing_info=data.pop("billingInfo"),
            admin_username=data.pop("adminUsername"),
            message=data.pop("message"),
        )

        project_request.additional_properties = data
        return project_request

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra keys captured in ``additional_properties``."""
        return list(self.additional_properties.keys())
Attributes:
- name (str):
- description (str):
- classification_ids (List[str]):
- billing_info (str):
- admin_username (str):
- message (str):
29def __init__(self, name, description, classification_ids, billing_info, admin_username, message): 30 self.name = name 31 self.description = description 32 self.classification_ids = classification_ids 33 self.billing_info = billing_info 34 self.admin_username = admin_username 35 self.message = message 36 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectRequest.
30 def to_dict(self) -> Dict[str, Any]: 31 name = self.name 32 33 description = self.description 34 35 classification_ids = self.classification_ids 36 37 billing_info = self.billing_info 38 39 admin_username = self.admin_username 40 41 message = self.message 42 43 field_dict: Dict[str, Any] = {} 44 field_dict.update(self.additional_properties) 45 field_dict.update( 46 { 47 "name": name, 48 "description": description, 49 "classificationIds": classification_ids, 50 "billingInfo": billing_info, 51 "adminUsername": admin_username, 52 "message": message, 53 } 54 ) 55 56 return field_dict
58 @classmethod 59 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 60 d = src_dict.copy() 61 name = d.pop("name") 62 63 description = d.pop("description") 64 65 classification_ids = cast(List[str], d.pop("classificationIds")) 66 67 billing_info = d.pop("billingInfo") 68 69 admin_username = d.pop("adminUsername") 70 71 message = d.pop("message") 72 73 project_request = cls( 74 name=name, 75 description=description, 76 classification_ids=classification_ids, 77 billing_info=billing_info, 78 admin_username=admin_username, 79 message=message, 80 ) 81 82 project_request.additional_properties = d 83 return project_request
@_attrs_define
class ProjectRequirement:
    """A governance requirement together with its fulfillment state for a project.

    Attributes:
        id (str): The unique identifier for the requirement
        name (str): The name of the requirement
        description (str): A brief description of the requirement
        type (GovernanceType): The types of governance requirements that can be enforced
        path (str): S3 prefix where the main file for the requirement is saved
        supplemental_path (str): S3 prefix where supplemental files for the requirement are saved
        scope (GovernanceScope): The levels at which governance requirements can be enforced
        contacts (List['GovernanceContact']): The governance contacts assigned to the requirement.
        is_enacted (bool): Whether the requirement is past the enactment date
        is_project_configured (bool): A requirement is project configured if it was created by the tenant but needs a
            file uploaded by the project
        is_fulfilled (bool): Whether the current user has fulfilled the requirement for this project
        acceptance (Union[GovernanceScope, None, Unset]): Specifies the level at which it is satisfied
        enactment_date (Union[None, Unset, datetime.datetime]): The date of enactment for the requirement
        expiration_type (Union[Unset, GovernanceExpiryType]): The expiry conditions that can be applied to governance
            requirements.
        expiration_days_after_completion (Union[None, Unset, int]): The number of days after completion at which the
            requirement expires (relative-to-completion expiry)
        expiration_date (Union[None, Unset, datetime.datetime]): The date of expiration for the requirement
        supplemental_docs (Union[List['GovernanceFile'], None, Unset]): Optional files with extra information, e.g.
            templates for documents, links, etc
        file (Union['GovernanceFile', None, Unset]):
        authorship (Union[GovernanceScope, None, Unset]): Who needs to supply the agreement document
        verification_method (Union[GovernanceTrainingVerification, None, Unset]): The value indicating how the
            completion of the training is verified.
        fulfillment_id (Union[None, Unset, str]): The id for the requirement fulfillment
        fulfillment_date (Union[None, Unset, datetime.datetime]): The date the requirement was fulfilled by the user
        fulfillment_file (Union[None, Unset, str]): The optional file uploaded to fulfill the requirement
        fulfillment_path (Union[None, Unset, str]): The path to the optional fulfillment file
    """

    # Required fields
    id: str
    name: str
    description: str
    type: GovernanceType
    path: str
    supplemental_path: str
    scope: GovernanceScope
    contacts: List["GovernanceContact"]
    is_enacted: bool
    is_project_configured: bool
    is_fulfilled: bool
    # Optional fields (UNSET means "absent from the payload")
    acceptance: Union[GovernanceScope, None, Unset] = UNSET
    enactment_date: Union[None, Unset, datetime.datetime] = UNSET
    expiration_type: Union[Unset, GovernanceExpiryType] = UNSET
    expiration_days_after_completion: Union[None, Unset, int] = UNSET
    expiration_date: Union[None, Unset, datetime.datetime] = UNSET
    supplemental_docs: Union[List["GovernanceFile"], None, Unset] = UNSET
    file: Union["GovernanceFile", None, Unset] = UNSET
    authorship: Union[GovernanceScope, None, Unset] = UNSET
    verification_method: Union[GovernanceTrainingVerification, None, Unset] = UNSET
    fulfillment_id: Union[None, Unset, str] = UNSET
    fulfillment_date: Union[None, Unset, datetime.datetime] = UNSET
    fulfillment_file: Union[None, Unset, str] = UNSET
    fulfillment_path: Union[None, Unset, str] = UNSET
    # Captures any JSON keys not declared above
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict.

        Required fields are always emitted (camelCase keys); optional union
        fields are omitted when unset. Enums serialize via ``.value`` and
        datetimes via ``isoformat()``.
        """
        from ..models.governance_file import GovernanceFile

        id = self.id

        name = self.name

        description = self.description

        type = self.type.value

        path = self.path

        supplemental_path = self.supplemental_path

        scope = self.scope.value

        contacts = []
        for contacts_item_data in self.contacts:
            contacts_item = contacts_item_data.to_dict()
            contacts.append(contacts_item)

        is_enacted = self.is_enacted

        is_project_configured = self.is_project_configured

        is_fulfilled = self.is_fulfilled

        # Union fields: Unset -> omit; model/enum/datetime -> converted; else pass through (None).
        acceptance: Union[None, Unset, str]
        if isinstance(self.acceptance, Unset):
            acceptance = UNSET
        elif isinstance(self.acceptance, GovernanceScope):
            acceptance = self.acceptance.value
        else:
            acceptance = self.acceptance

        enactment_date: Union[None, Unset, str]
        if isinstance(self.enactment_date, Unset):
            enactment_date = UNSET
        elif isinstance(self.enactment_date, datetime.datetime):
            enactment_date = self.enactment_date.isoformat()
        else:
            enactment_date = self.enactment_date

        # expiration_type is not nullable, so only the Unset case is special.
        expiration_type: Union[Unset, str] = UNSET
        if not isinstance(self.expiration_type, Unset):
            expiration_type = self.expiration_type.value

        expiration_days_after_completion: Union[None, Unset, int]
        if isinstance(self.expiration_days_after_completion, Unset):
            expiration_days_after_completion = UNSET
        else:
            expiration_days_after_completion = self.expiration_days_after_completion

        expiration_date: Union[None, Unset, str]
        if isinstance(self.expiration_date, Unset):
            expiration_date = UNSET
        elif isinstance(self.expiration_date, datetime.datetime):
            expiration_date = self.expiration_date.isoformat()
        else:
            expiration_date = self.expiration_date

        supplemental_docs: Union[List[Dict[str, Any]], None, Unset]
        if isinstance(self.supplemental_docs, Unset):
            supplemental_docs = UNSET
        elif isinstance(self.supplemental_docs, list):
            supplemental_docs = []
            for supplemental_docs_type_0_item_data in self.supplemental_docs:
                supplemental_docs_type_0_item = supplemental_docs_type_0_item_data.to_dict()
                supplemental_docs.append(supplemental_docs_type_0_item)

        else:
            supplemental_docs = self.supplemental_docs

        file: Union[Dict[str, Any], None, Unset]
        if isinstance(self.file, Unset):
            file = UNSET
        elif isinstance(self.file, GovernanceFile):
            file = self.file.to_dict()
        else:
            file = self.file

        authorship: Union[None, Unset, str]
        if isinstance(self.authorship, Unset):
            authorship = UNSET
        elif isinstance(self.authorship, GovernanceScope):
            authorship = self.authorship.value
        else:
            authorship = self.authorship

        verification_method: Union[None, Unset, str]
        if isinstance(self.verification_method, Unset):
            verification_method = UNSET
        elif isinstance(self.verification_method, GovernanceTrainingVerification):
            verification_method = self.verification_method.value
        else:
            verification_method = self.verification_method

        fulfillment_id: Union[None, Unset, str]
        if isinstance(self.fulfillment_id, Unset):
            fulfillment_id = UNSET
        else:
            fulfillment_id = self.fulfillment_id

        fulfillment_date: Union[None, Unset, str]
        if isinstance(self.fulfillment_date, Unset):
            fulfillment_date = UNSET
        elif isinstance(self.fulfillment_date, datetime.datetime):
            fulfillment_date = self.fulfillment_date.isoformat()
        else:
            fulfillment_date = self.fulfillment_date

        fulfillment_file: Union[None, Unset, str]
        if isinstance(self.fulfillment_file, Unset):
            fulfillment_file = UNSET
        else:
            fulfillment_file = self.fulfillment_file

        fulfillment_path: Union[None, Unset, str]
        if isinstance(self.fulfillment_path, Unset):
            fulfillment_path = UNSET
        else:
            fulfillment_path = self.fulfillment_path

        # Extra properties go in first so declared fields win on key collisions.
        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
                "description": description,
                "type": type,
                "path": path,
                "supplementalPath": supplemental_path,
                "scope": scope,
                "contacts": contacts,
                "isEnacted": is_enacted,
                "isProjectConfigured": is_project_configured,
                "isFulfilled": is_fulfilled,
            }
        )
        if acceptance is not UNSET:
            field_dict["acceptance"] = acceptance
        if enactment_date is not UNSET:
            field_dict["enactmentDate"] = enactment_date
        if expiration_type is not UNSET:
            field_dict["expirationType"] = expiration_type
        if expiration_days_after_completion is not UNSET:
            field_dict["expirationDaysAfterCompletion"] = expiration_days_after_completion
        if expiration_date is not UNSET:
            field_dict["expirationDate"] = expiration_date
        if supplemental_docs is not UNSET:
            field_dict["supplementalDocs"] = supplemental_docs
        if file is not UNSET:
            field_dict["file"] = file
        if authorship is not UNSET:
            field_dict["authorship"] = authorship
        if verification_method is not UNSET:
            field_dict["verificationMethod"] = verification_method
        if fulfillment_id is not UNSET:
            field_dict["fulfillmentId"] = fulfillment_id
        if fulfillment_date is not UNSET:
            field_dict["fulfillmentDate"] = fulfillment_date
        if fulfillment_file is not UNSET:
            field_dict["fulfillmentFile"] = fulfillment_file
        if fulfillment_path is not UNSET:
            field_dict["fulfillmentPath"] = fulfillment_path

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize a ProjectRequirement from its JSON dict representation.

        Each ``_parse_*`` helper handles one union field: None/Unset pass
        through unchanged, a parse is attempted for the typed alternative,
        and on failure the raw value is returned (generated best-effort
        style). Leftover keys become ``additional_properties``.
        """
        from ..models.governance_contact import GovernanceContact
        from ..models.governance_file import GovernanceFile

        d = src_dict.copy()
        id = d.pop("id")

        name = d.pop("name")

        description = d.pop("description")

        type = GovernanceType(d.pop("type"))

        path = d.pop("path")

        supplemental_path = d.pop("supplementalPath")

        scope = GovernanceScope(d.pop("scope"))

        contacts = []
        _contacts = d.pop("contacts")
        for contacts_item_data in _contacts:
            contacts_item = GovernanceContact.from_dict(contacts_item_data)

            contacts.append(contacts_item)

        is_enacted = d.pop("isEnacted")

        is_project_configured = d.pop("isProjectConfigured")

        is_fulfilled = d.pop("isFulfilled")

        def _parse_acceptance(data: object) -> Union[GovernanceScope, None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                acceptance_type_1 = GovernanceScope(data)

                return acceptance_type_1
            except:  # noqa: E722
                pass
            return cast(Union[GovernanceScope, None, Unset], data)

        acceptance = _parse_acceptance(d.pop("acceptance", UNSET))

        def _parse_enactment_date(data: object) -> Union[None, Unset, datetime.datetime]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                enactment_date_type_0 = isoparse(data)

                return enactment_date_type_0
            except:  # noqa: E722
                pass
            return cast(Union[None, Unset, datetime.datetime], data)

        enactment_date = _parse_enactment_date(d.pop("enactmentDate", UNSET))

        # Not nullable, so no helper: only the Unset case is special.
        _expiration_type = d.pop("expirationType", UNSET)
        expiration_type: Union[Unset, GovernanceExpiryType]
        if isinstance(_expiration_type, Unset):
            expiration_type = UNSET
        else:
            expiration_type = GovernanceExpiryType(_expiration_type)

        def _parse_expiration_days_after_completion(data: object) -> Union[None, Unset, int]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, int], data)

        expiration_days_after_completion = _parse_expiration_days_after_completion(
            d.pop("expirationDaysAfterCompletion", UNSET)
        )

        def _parse_expiration_date(data: object) -> Union[None, Unset, datetime.datetime]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                expiration_date_type_0 = isoparse(data)

                return expiration_date_type_0
            except:  # noqa: E722
                pass
            return cast(Union[None, Unset, datetime.datetime], data)

        expiration_date = _parse_expiration_date(d.pop("expirationDate", UNSET))

        def _parse_supplemental_docs(data: object) -> Union[List["GovernanceFile"], None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                supplemental_docs_type_0 = []
                _supplemental_docs_type_0 = data
                for supplemental_docs_type_0_item_data in _supplemental_docs_type_0:
                    supplemental_docs_type_0_item = GovernanceFile.from_dict(supplemental_docs_type_0_item_data)

                    supplemental_docs_type_0.append(supplemental_docs_type_0_item)

                return supplemental_docs_type_0
            except:  # noqa: E722
                pass
            return cast(Union[List["GovernanceFile"], None, Unset], data)

        supplemental_docs = _parse_supplemental_docs(d.pop("supplementalDocs", UNSET))

        def _parse_file(data: object) -> Union["GovernanceFile", None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                file_type_1 = GovernanceFile.from_dict(data)

                return file_type_1
            except:  # noqa: E722
                pass
            return cast(Union["GovernanceFile", None, Unset], data)

        file = _parse_file(d.pop("file", UNSET))

        def _parse_authorship(data: object) -> Union[GovernanceScope, None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                authorship_type_1 = GovernanceScope(data)

                return authorship_type_1
            except:  # noqa: E722
                pass
            return cast(Union[GovernanceScope, None, Unset], data)

        authorship = _parse_authorship(d.pop("authorship", UNSET))

        def _parse_verification_method(data: object) -> Union[GovernanceTrainingVerification, None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                verification_method_type_1 = GovernanceTrainingVerification(data)

                return verification_method_type_1
            except:  # noqa: E722
                pass
            return cast(Union[GovernanceTrainingVerification, None, Unset], data)

        verification_method = _parse_verification_method(d.pop("verificationMethod", UNSET))

        def _parse_fulfillment_id(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        fulfillment_id = _parse_fulfillment_id(d.pop("fulfillmentId", UNSET))

        def _parse_fulfillment_date(data: object) -> Union[None, Unset, datetime.datetime]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                fulfillment_date_type_0 = isoparse(data)

                return fulfillment_date_type_0
            except:  # noqa: E722
                pass
            return cast(Union[None, Unset, datetime.datetime], data)

        fulfillment_date = _parse_fulfillment_date(d.pop("fulfillmentDate", UNSET))

        def _parse_fulfillment_file(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        fulfillment_file = _parse_fulfillment_file(d.pop("fulfillmentFile", UNSET))

        def _parse_fulfillment_path(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        fulfillment_path = _parse_fulfillment_path(d.pop("fulfillmentPath", UNSET))

        project_requirement = cls(
            id=id,
            name=name,
            description=description,
            type=type,
            path=path,
            supplemental_path=supplemental_path,
            scope=scope,
            contacts=contacts,
            is_enacted=is_enacted,
            is_project_configured=is_project_configured,
            is_fulfilled=is_fulfilled,
            acceptance=acceptance,
            enactment_date=enactment_date,
            expiration_type=expiration_type,
            expiration_days_after_completion=expiration_days_after_completion,
            expiration_date=expiration_date,
            supplemental_docs=supplemental_docs,
            file=file,
            authorship=authorship,
            verification_method=verification_method,
            fulfillment_id=fulfillment_id,
            fulfillment_date=fulfillment_date,
            fulfillment_file=fulfillment_file,
            fulfillment_path=fulfillment_path,
        )

        # Remaining keys were not declared on the model; keep them round-trippable.
        project_requirement.additional_properties = d
        return project_requirement

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra keys captured in ``additional_properties``."""
        return list(self.additional_properties.keys())
Attributes:
- id (str): The unique identifier for the requirement
- name (str): The name of the requirement
- description (str): A brief description of the requirement
- type (GovernanceType): The types of governance requirements that can be enforced
- path (str): S3 prefix where the main file for the requirement is saved
- supplemental_path (str): S3 prefix where supplemental files for the requirement are saved
- scope (GovernanceScope): The levels at which governance requirements can be enforced
- contacts (List['GovernanceContact']): The governance contacts assigned to the requirement.
- is_enacted (bool): Whether the requirement is past the enactment date
- is_project_configured (bool): A requirement is project configured if it was created by the tenant but needs a file uploaded by the project
- is_fulfilled (bool): Whether the current user has fulfilled the requirement for this project
- acceptance (Union[GovernanceScope, None, Unset]): Specifies the level at which it is satisfied
- enactment_date (Union[None, Unset, datetime.datetime]): The date of enactment for the requirement
- expiration_type (Union[Unset, GovernanceExpiryType]): The expiry conditions that can be applied to governance requirements.
- expiration_days_after_completion (Union[None, Unset, int]): The number of days after completion at which the requirement expires (relative-to-completion expiration)
- expiration_date (Union[None, Unset, datetime.datetime]): The date of expiration for the requirement
- supplemental_docs (Union[List['GovernanceFile'], None, Unset]): Optional files with extra information, e.g. templates for documents, links, etc
- file (Union['GovernanceFile', None, Unset]):
- authorship (Union[GovernanceScope, None, Unset]): Who needs to supply the agreement document
- verification_method (Union[GovernanceTrainingVerification, None, Unset]): The value indicating how the completion of the training is verified.
- fulfillment_id (Union[None, Unset, str]): The id for the requirement fulfillment
- fulfillment_date (Union[None, Unset, datetime.datetime]): The date the requirement was fulfilled by the user
- fulfillment_file (Union[None, Unset, str]): The optional file uploaded to fulfill the requirement
- fulfillment_path (Union[None, Unset, str]): The path to the optional fulfillment file
47def __init__(self, id, name, description, type, path, supplemental_path, scope, contacts, is_enacted, is_project_configured, is_fulfilled, acceptance=attr_dict['acceptance'].default, enactment_date=attr_dict['enactment_date'].default, expiration_type=attr_dict['expiration_type'].default, expiration_days_after_completion=attr_dict['expiration_days_after_completion'].default, expiration_date=attr_dict['expiration_date'].default, supplemental_docs=attr_dict['supplemental_docs'].default, file=attr_dict['file'].default, authorship=attr_dict['authorship'].default, verification_method=attr_dict['verification_method'].default, fulfillment_id=attr_dict['fulfillment_id'].default, fulfillment_date=attr_dict['fulfillment_date'].default, fulfillment_file=attr_dict['fulfillment_file'].default, fulfillment_path=attr_dict['fulfillment_path'].default): 48 self.id = id 49 self.name = name 50 self.description = description 51 self.type = type 52 self.path = path 53 self.supplemental_path = supplemental_path 54 self.scope = scope 55 self.contacts = contacts 56 self.is_enacted = is_enacted 57 self.is_project_configured = is_project_configured 58 self.is_fulfilled = is_fulfilled 59 self.acceptance = acceptance 60 self.enactment_date = enactment_date 61 self.expiration_type = expiration_type 62 self.expiration_days_after_completion = expiration_days_after_completion 63 self.expiration_date = expiration_date 64 self.supplemental_docs = supplemental_docs 65 self.file = file 66 self.authorship = authorship 67 self.verification_method = verification_method 68 self.fulfillment_id = fulfillment_id 69 self.fulfillment_date = fulfillment_date 70 self.fulfillment_file = fulfillment_file 71 self.fulfillment_path = fulfillment_path 72 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectRequirement.
    def to_dict(self) -> Dict[str, Any]:
        """Serialize this model to a plain dict keyed by the API's camelCase names.

        Required fields are always emitted; optional fields are emitted only
        when they are not ``UNSET`` (an explicit ``None`` passes through and
        is emitted). Enum fields are reduced to their ``.value``, datetimes
        to ISO-8601 strings, and nested models via their own ``to_dict``.
        ``additional_properties`` is merged in first, so declared fields win
        on key collisions.
        """
        # Local import mirrors the generated-code pattern for avoiding
        # circular imports between model modules.
        from ..models.governance_file import GovernanceFile

        id = self.id

        name = self.name

        description = self.description

        type = self.type.value

        path = self.path

        supplemental_path = self.supplemental_path

        scope = self.scope.value

        # Nested GovernanceContact models serialize recursively.
        contacts = []
        for contacts_item_data in self.contacts:
            contacts_item = contacts_item_data.to_dict()
            contacts.append(contacts_item)

        is_enacted = self.is_enacted

        is_project_configured = self.is_project_configured

        is_fulfilled = self.is_fulfilled

        # Union fields: UNSET stays UNSET (omitted below); enum -> .value;
        # anything else (including None) passes through unchanged.
        acceptance: Union[None, Unset, str]
        if isinstance(self.acceptance, Unset):
            acceptance = UNSET
        elif isinstance(self.acceptance, GovernanceScope):
            acceptance = self.acceptance.value
        else:
            acceptance = self.acceptance

        enactment_date: Union[None, Unset, str]
        if isinstance(self.enactment_date, Unset):
            enactment_date = UNSET
        elif isinstance(self.enactment_date, datetime.datetime):
            enactment_date = self.enactment_date.isoformat()
        else:
            enactment_date = self.enactment_date

        expiration_type: Union[Unset, str] = UNSET
        if not isinstance(self.expiration_type, Unset):
            expiration_type = self.expiration_type.value

        expiration_days_after_completion: Union[None, Unset, int]
        if isinstance(self.expiration_days_after_completion, Unset):
            expiration_days_after_completion = UNSET
        else:
            expiration_days_after_completion = self.expiration_days_after_completion

        expiration_date: Union[None, Unset, str]
        if isinstance(self.expiration_date, Unset):
            expiration_date = UNSET
        elif isinstance(self.expiration_date, datetime.datetime):
            expiration_date = self.expiration_date.isoformat()
        else:
            expiration_date = self.expiration_date

        supplemental_docs: Union[List[Dict[str, Any]], None, Unset]
        if isinstance(self.supplemental_docs, Unset):
            supplemental_docs = UNSET
        elif isinstance(self.supplemental_docs, list):
            # List of GovernanceFile models -> list of dicts.
            supplemental_docs = []
            for supplemental_docs_type_0_item_data in self.supplemental_docs:
                supplemental_docs_type_0_item = supplemental_docs_type_0_item_data.to_dict()
                supplemental_docs.append(supplemental_docs_type_0_item)

        else:
            supplemental_docs = self.supplemental_docs

        file: Union[Dict[str, Any], None, Unset]
        if isinstance(self.file, Unset):
            file = UNSET
        elif isinstance(self.file, GovernanceFile):
            file = self.file.to_dict()
        else:
            file = self.file

        authorship: Union[None, Unset, str]
        if isinstance(self.authorship, Unset):
            authorship = UNSET
        elif isinstance(self.authorship, GovernanceScope):
            authorship = self.authorship.value
        else:
            authorship = self.authorship

        verification_method: Union[None, Unset, str]
        if isinstance(self.verification_method, Unset):
            verification_method = UNSET
        elif isinstance(self.verification_method, GovernanceTrainingVerification):
            verification_method = self.verification_method.value
        else:
            verification_method = self.verification_method

        fulfillment_id: Union[None, Unset, str]
        if isinstance(self.fulfillment_id, Unset):
            fulfillment_id = UNSET
        else:
            fulfillment_id = self.fulfillment_id

        fulfillment_date: Union[None, Unset, str]
        if isinstance(self.fulfillment_date, Unset):
            fulfillment_date = UNSET
        elif isinstance(self.fulfillment_date, datetime.datetime):
            fulfillment_date = self.fulfillment_date.isoformat()
        else:
            fulfillment_date = self.fulfillment_date

        fulfillment_file: Union[None, Unset, str]
        if isinstance(self.fulfillment_file, Unset):
            fulfillment_file = UNSET
        else:
            fulfillment_file = self.fulfillment_file

        fulfillment_path: Union[None, Unset, str]
        if isinstance(self.fulfillment_path, Unset):
            fulfillment_path = UNSET
        else:
            fulfillment_path = self.fulfillment_path

        # additional_properties first, then declared fields (declared win).
        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
                "description": description,
                "type": type,
                "path": path,
                "supplementalPath": supplemental_path,
                "scope": scope,
                "contacts": contacts,
                "isEnacted": is_enacted,
                "isProjectConfigured": is_project_configured,
                "isFulfilled": is_fulfilled,
            }
        )
        # Optional fields: include only when set (None is still included).
        if acceptance is not UNSET:
            field_dict["acceptance"] = acceptance
        if enactment_date is not UNSET:
            field_dict["enactmentDate"] = enactment_date
        if expiration_type is not UNSET:
            field_dict["expirationType"] = expiration_type
        if expiration_days_after_completion is not UNSET:
            field_dict["expirationDaysAfterCompletion"] = expiration_days_after_completion
        if expiration_date is not UNSET:
            field_dict["expirationDate"] = expiration_date
        if supplemental_docs is not UNSET:
            field_dict["supplementalDocs"] = supplemental_docs
        if file is not UNSET:
            field_dict["file"] = file
        if authorship is not UNSET:
            field_dict["authorship"] = authorship
        if verification_method is not UNSET:
            field_dict["verificationMethod"] = verification_method
        if fulfillment_id is not UNSET:
            field_dict["fulfillmentId"] = fulfillment_id
        if fulfillment_date is not UNSET:
            field_dict["fulfillmentDate"] = fulfillment_date
        if fulfillment_file is not UNSET:
            field_dict["fulfillmentFile"] = fulfillment_file
        if fulfillment_path is not UNSET:
            field_dict["fulfillmentPath"] = fulfillment_path

        return field_dict
    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build a ProjectRequirement from a decoded JSON dict.

        Required keys are popped and converted (enums reconstructed from
        their values, nested models via ``from_dict``). Each optional field
        is handled by a local ``_parse_*`` helper that returns ``None`` /
        ``UNSET`` unchanged and falls back to the raw value when conversion
        fails (the broad ``except`` is deliberate in this generated code).
        Keys left over after popping are kept as ``additional_properties``.
        """
        # Local imports mirror the generated-code pattern for avoiding
        # circular imports between model modules.
        from ..models.governance_contact import GovernanceContact
        from ..models.governance_file import GovernanceFile

        # Copy so pops don't mutate the caller's dict.
        d = src_dict.copy()
        id = d.pop("id")

        name = d.pop("name")

        description = d.pop("description")

        type = GovernanceType(d.pop("type"))

        path = d.pop("path")

        supplemental_path = d.pop("supplementalPath")

        scope = GovernanceScope(d.pop("scope"))

        contacts = []
        _contacts = d.pop("contacts")
        for contacts_item_data in _contacts:
            contacts_item = GovernanceContact.from_dict(contacts_item_data)

            contacts.append(contacts_item)

        is_enacted = d.pop("isEnacted")

        is_project_configured = d.pop("isProjectConfigured")

        is_fulfilled = d.pop("isFulfilled")

        def _parse_acceptance(data: object) -> Union[GovernanceScope, None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                acceptance_type_1 = GovernanceScope(data)

                return acceptance_type_1
            except:  # noqa: E722
                pass
            return cast(Union[GovernanceScope, None, Unset], data)

        acceptance = _parse_acceptance(d.pop("acceptance", UNSET))

        def _parse_enactment_date(data: object) -> Union[None, Unset, datetime.datetime]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                enactment_date_type_0 = isoparse(data)

                return enactment_date_type_0
            except:  # noqa: E722
                pass
            return cast(Union[None, Unset, datetime.datetime], data)

        enactment_date = _parse_enactment_date(d.pop("enactmentDate", UNSET))

        _expiration_type = d.pop("expirationType", UNSET)
        expiration_type: Union[Unset, GovernanceExpiryType]
        if isinstance(_expiration_type, Unset):
            expiration_type = UNSET
        else:
            expiration_type = GovernanceExpiryType(_expiration_type)

        def _parse_expiration_days_after_completion(data: object) -> Union[None, Unset, int]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, int], data)

        expiration_days_after_completion = _parse_expiration_days_after_completion(
            d.pop("expirationDaysAfterCompletion", UNSET)
        )

        def _parse_expiration_date(data: object) -> Union[None, Unset, datetime.datetime]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                expiration_date_type_0 = isoparse(data)

                return expiration_date_type_0
            except:  # noqa: E722
                pass
            return cast(Union[None, Unset, datetime.datetime], data)

        expiration_date = _parse_expiration_date(d.pop("expirationDate", UNSET))

        def _parse_supplemental_docs(data: object) -> Union[List["GovernanceFile"], None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                supplemental_docs_type_0 = []
                _supplemental_docs_type_0 = data
                for supplemental_docs_type_0_item_data in _supplemental_docs_type_0:
                    supplemental_docs_type_0_item = GovernanceFile.from_dict(supplemental_docs_type_0_item_data)

                    supplemental_docs_type_0.append(supplemental_docs_type_0_item)

                return supplemental_docs_type_0
            except:  # noqa: E722
                pass
            return cast(Union[List["GovernanceFile"], None, Unset], data)

        supplemental_docs = _parse_supplemental_docs(d.pop("supplementalDocs", UNSET))

        def _parse_file(data: object) -> Union["GovernanceFile", None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                file_type_1 = GovernanceFile.from_dict(data)

                return file_type_1
            except:  # noqa: E722
                pass
            return cast(Union["GovernanceFile", None, Unset], data)

        file = _parse_file(d.pop("file", UNSET))

        def _parse_authorship(data: object) -> Union[GovernanceScope, None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                authorship_type_1 = GovernanceScope(data)

                return authorship_type_1
            except:  # noqa: E722
                pass
            return cast(Union[GovernanceScope, None, Unset], data)

        authorship = _parse_authorship(d.pop("authorship", UNSET))

        def _parse_verification_method(data: object) -> Union[GovernanceTrainingVerification, None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                verification_method_type_1 = GovernanceTrainingVerification(data)

                return verification_method_type_1
            except:  # noqa: E722
                pass
            return cast(Union[GovernanceTrainingVerification, None, Unset], data)

        verification_method = _parse_verification_method(d.pop("verificationMethod", UNSET))

        def _parse_fulfillment_id(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        fulfillment_id = _parse_fulfillment_id(d.pop("fulfillmentId", UNSET))

        def _parse_fulfillment_date(data: object) -> Union[None, Unset, datetime.datetime]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                fulfillment_date_type_0 = isoparse(data)

                return fulfillment_date_type_0
            except:  # noqa: E722
                pass
            return cast(Union[None, Unset, datetime.datetime], data)

        fulfillment_date = _parse_fulfillment_date(d.pop("fulfillmentDate", UNSET))

        def _parse_fulfillment_file(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        fulfillment_file = _parse_fulfillment_file(d.pop("fulfillmentFile", UNSET))

        def _parse_fulfillment_path(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        fulfillment_path = _parse_fulfillment_path(d.pop("fulfillmentPath", UNSET))

        project_requirement = cls(
            id=id,
            name=name,
            description=description,
            type=type,
            path=path,
            supplemental_path=supplemental_path,
            scope=scope,
            contacts=contacts,
            is_enacted=is_enacted,
            is_project_configured=is_project_configured,
            is_fulfilled=is_fulfilled,
            acceptance=acceptance,
            enactment_date=enactment_date,
            expiration_type=expiration_type,
            expiration_days_after_completion=expiration_days_after_completion,
            expiration_date=expiration_date,
            supplemental_docs=supplemental_docs,
            file=file,
            authorship=authorship,
            verification_method=verification_method,
            fulfillment_id=fulfillment_id,
            fulfillment_date=fulfillment_date,
            fulfillment_file=fulfillment_file,
            fulfillment_path=fulfillment_path,
        )

        # Whatever keys remain were not declared fields; preserve them.
        project_requirement.additional_properties = d
        return project_requirement
class ProjectRole(str, Enum):
    """Role a user can hold within a project.

    ``UNKNOWN`` is a client-side fallback produced when the server returns a
    role value this client version does not recognize; it must never be sent
    back to the API in a request.
    """

    ADMIN = "ADMIN"
    COLLABORATOR = "COLLABORATOR"
    CONTRIBUTOR = "CONTRIBUTOR"
    NONE = "NONE"
    OWNER = "OWNER"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        # Emit the raw API value (e.g. "ADMIN"), not "ProjectRole.ADMIN".
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Called by the Enum machinery when `value` matches no member:
        # map unrecognized server values to UNKNOWN instead of raising
        # ValueError. Returning the member directly avoids the original's
        # redundant re-lookup via cls(cls.UNKNOWN).
        return cls.UNKNOWN
str(object='') -> str
str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class ProjectSettings:
    """Project-level configuration: budget, compute toggles, quotas, networking, and sharing.

    Attributes:
        budget_amount (int): Total allowed cost for the budget period
        budget_period (BudgetPeriod): Time period associated with the budget amount
        dragen_ami (Union[None, Unset, str]): AMI ID for the DRAGEN compute environment (if enabled)
        enable_compute (Union[Unset, bool]): Enables the default compute environment Default: True.
        enable_dragen (Union[Unset, bool]): Enables the DRAGEN compute environment Default: False.
        enable_backup (Union[Unset, bool]): Enables the AWS Backup service for S3 Default: False.
        enable_sftp (Union[Unset, bool]): Enables access to files over SFTP Default: False.
        max_f1vcpu (Union[Unset, int]): Service quota limit for On Demand F1 instances Default: 0.
        max_spot_vcpu (Union[Unset, int]): Service quota limit for Spot instances Default: 0.
        max_gpuvcpu (Union[Unset, int]): Service quota limit for GPU Spot instances Default: 0.
        retention_policy_days (Union[Unset, int]): Days to keep deleted datasets before being permanently erased
            Default: 7.
        temporary_storage_lifetime_days (Union[Unset, int]): Days to keep temporary storage space (workflow executor
            cache) Default: 14.
        service_connections (Union[Unset, List[str]]): List of service connections to enable
        vpc_id (Union[None, Unset, str]): VPC that the compute environment will use Example: vpc-00000000000000000.
        batch_subnets (Union[List[str], None, Unset]): List of subnets that the compute environment will use Example:
            ['subnet-00000000000000000'].
        sagemaker_subnets (Union[List[str], None, Unset]): List of subnets that the sagemaker instances will use
            Example: ['subnet-00000000000000000'].
        kms_arn (Union[None, Unset, str]): KMS Key ARN to encrypt S3 objects, if not provided, default bucket encryption
            will be used
        is_discoverable (Union[None, Unset, bool]): Enables the project to be discoverable by other users Default:
            False.
        is_shareable (Union[None, Unset, bool]): Enables the project to be shared with other projects Default: False.
    """

    budget_amount: int
    budget_period: BudgetPeriod
    dragen_ami: Union[None, Unset, str] = UNSET
    enable_compute: Union[Unset, bool] = True
    enable_dragen: Union[Unset, bool] = False
    enable_backup: Union[Unset, bool] = False
    enable_sftp: Union[Unset, bool] = False
    max_f1vcpu: Union[Unset, int] = 0
    max_spot_vcpu: Union[Unset, int] = 0
    max_gpuvcpu: Union[Unset, int] = 0
    retention_policy_days: Union[Unset, int] = 7
    temporary_storage_lifetime_days: Union[Unset, int] = 14
    service_connections: Union[Unset, List[str]] = UNSET
    vpc_id: Union[None, Unset, str] = UNSET
    batch_subnets: Union[List[str], None, Unset] = UNSET
    sagemaker_subnets: Union[List[str], None, Unset] = UNSET
    kms_arn: Union[None, Unset, str] = UNSET
    is_discoverable: Union[None, Unset, bool] = False
    is_shareable: Union[None, Unset, bool] = False
    # Undeclared keys from the API are collected here (init=False).
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a dict keyed by the API's camelCase names.

        Required fields are always emitted; optional fields only when not
        ``UNSET`` (an explicit ``None`` passes through). ``budget_period``
        is reduced to its enum ``.value``. ``additional_properties`` is
        merged in first, so declared fields win on key collisions.
        """
        budget_amount = self.budget_amount

        budget_period = self.budget_period.value

        dragen_ami: Union[None, Unset, str]
        if isinstance(self.dragen_ami, Unset):
            dragen_ami = UNSET
        else:
            dragen_ami = self.dragen_ami

        enable_compute = self.enable_compute

        enable_dragen = self.enable_dragen

        enable_backup = self.enable_backup

        enable_sftp = self.enable_sftp

        max_f1vcpu = self.max_f1vcpu

        max_spot_vcpu = self.max_spot_vcpu

        max_gpuvcpu = self.max_gpuvcpu

        retention_policy_days = self.retention_policy_days

        temporary_storage_lifetime_days = self.temporary_storage_lifetime_days

        service_connections: Union[Unset, List[str]] = UNSET
        if not isinstance(self.service_connections, Unset):
            service_connections = self.service_connections

        vpc_id: Union[None, Unset, str]
        if isinstance(self.vpc_id, Unset):
            vpc_id = UNSET
        else:
            vpc_id = self.vpc_id

        # NOTE(review): the list branch and the fallback branch are
        # identical here — generated-code artifact, behavior unaffected.
        batch_subnets: Union[List[str], None, Unset]
        if isinstance(self.batch_subnets, Unset):
            batch_subnets = UNSET
        elif isinstance(self.batch_subnets, list):
            batch_subnets = self.batch_subnets

        else:
            batch_subnets = self.batch_subnets

        sagemaker_subnets: Union[List[str], None, Unset]
        if isinstance(self.sagemaker_subnets, Unset):
            sagemaker_subnets = UNSET
        elif isinstance(self.sagemaker_subnets, list):
            sagemaker_subnets = self.sagemaker_subnets

        else:
            sagemaker_subnets = self.sagemaker_subnets

        kms_arn: Union[None, Unset, str]
        if isinstance(self.kms_arn, Unset):
            kms_arn = UNSET
        else:
            kms_arn = self.kms_arn

        is_discoverable: Union[None, Unset, bool]
        if isinstance(self.is_discoverable, Unset):
            is_discoverable = UNSET
        else:
            is_discoverable = self.is_discoverable

        is_shareable: Union[None, Unset, bool]
        if isinstance(self.is_shareable, Unset):
            is_shareable = UNSET
        else:
            is_shareable = self.is_shareable

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "budgetAmount": budget_amount,
                "budgetPeriod": budget_period,
            }
        )
        if dragen_ami is not UNSET:
            field_dict["dragenAmi"] = dragen_ami
        if enable_compute is not UNSET:
            field_dict["enableCompute"] = enable_compute
        if enable_dragen is not UNSET:
            field_dict["enableDragen"] = enable_dragen
        if enable_backup is not UNSET:
            field_dict["enableBackup"] = enable_backup
        if enable_sftp is not UNSET:
            field_dict["enableSftp"] = enable_sftp
        if max_f1vcpu is not UNSET:
            field_dict["maxF1VCPU"] = max_f1vcpu
        if max_spot_vcpu is not UNSET:
            field_dict["maxSpotVCPU"] = max_spot_vcpu
        if max_gpuvcpu is not UNSET:
            field_dict["maxGPUVCPU"] = max_gpuvcpu
        if retention_policy_days is not UNSET:
            field_dict["retentionPolicyDays"] = retention_policy_days
        if temporary_storage_lifetime_days is not UNSET:
            field_dict["temporaryStorageLifetimeDays"] = temporary_storage_lifetime_days
        if service_connections is not UNSET:
            field_dict["serviceConnections"] = service_connections
        if vpc_id is not UNSET:
            field_dict["vpcId"] = vpc_id
        if batch_subnets is not UNSET:
            field_dict["batchSubnets"] = batch_subnets
        if sagemaker_subnets is not UNSET:
            field_dict["sagemakerSubnets"] = sagemaker_subnets
        if kms_arn is not UNSET:
            field_dict["kmsArn"] = kms_arn
        if is_discoverable is not UNSET:
            field_dict["isDiscoverable"] = is_discoverable
        if is_shareable is not UNSET:
            field_dict["isShareable"] = is_shareable

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build a ProjectSettings from a decoded JSON dict.

        Required keys are popped and converted; optional fields use local
        ``_parse_*`` helpers that pass ``None``/``UNSET`` through and fall
        back to the raw value on conversion failure. Leftover keys are kept
        as ``additional_properties``.
        """
        # Copy so pops don't mutate the caller's dict.
        d = src_dict.copy()
        budget_amount = d.pop("budgetAmount")

        budget_period = BudgetPeriod(d.pop("budgetPeriod"))

        def _parse_dragen_ami(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        dragen_ami = _parse_dragen_ami(d.pop("dragenAmi", UNSET))

        enable_compute = d.pop("enableCompute", UNSET)

        enable_dragen = d.pop("enableDragen", UNSET)

        enable_backup = d.pop("enableBackup", UNSET)

        enable_sftp = d.pop("enableSftp", UNSET)

        max_f1vcpu = d.pop("maxF1VCPU", UNSET)

        max_spot_vcpu = d.pop("maxSpotVCPU", UNSET)

        max_gpuvcpu = d.pop("maxGPUVCPU", UNSET)

        retention_policy_days = d.pop("retentionPolicyDays", UNSET)

        temporary_storage_lifetime_days = d.pop("temporaryStorageLifetimeDays", UNSET)

        service_connections = cast(List[str], d.pop("serviceConnections", UNSET))

        def _parse_vpc_id(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        vpc_id = _parse_vpc_id(d.pop("vpcId", UNSET))

        def _parse_batch_subnets(data: object) -> Union[List[str], None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                batch_subnets_type_0 = cast(List[str], data)

                return batch_subnets_type_0
            except:  # noqa: E722
                pass
            return cast(Union[List[str], None, Unset], data)

        batch_subnets = _parse_batch_subnets(d.pop("batchSubnets", UNSET))

        def _parse_sagemaker_subnets(data: object) -> Union[List[str], None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                sagemaker_subnets_type_0 = cast(List[str], data)

                return sagemaker_subnets_type_0
            except:  # noqa: E722
                pass
            return cast(Union[List[str], None, Unset], data)

        sagemaker_subnets = _parse_sagemaker_subnets(d.pop("sagemakerSubnets", UNSET))

        def _parse_kms_arn(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        kms_arn = _parse_kms_arn(d.pop("kmsArn", UNSET))

        def _parse_is_discoverable(data: object) -> Union[None, Unset, bool]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, bool], data)

        is_discoverable = _parse_is_discoverable(d.pop("isDiscoverable", UNSET))

        def _parse_is_shareable(data: object) -> Union[None, Unset, bool]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, bool], data)

        is_shareable = _parse_is_shareable(d.pop("isShareable", UNSET))

        project_settings = cls(
            budget_amount=budget_amount,
            budget_period=budget_period,
            dragen_ami=dragen_ami,
            enable_compute=enable_compute,
            enable_dragen=enable_dragen,
            enable_backup=enable_backup,
            enable_sftp=enable_sftp,
            max_f1vcpu=max_f1vcpu,
            max_spot_vcpu=max_spot_vcpu,
            max_gpuvcpu=max_gpuvcpu,
            retention_policy_days=retention_policy_days,
            temporary_storage_lifetime_days=temporary_storage_lifetime_days,
            service_connections=service_connections,
            vpc_id=vpc_id,
            batch_subnets=batch_subnets,
            sagemaker_subnets=sagemaker_subnets,
            kms_arn=kms_arn,
            is_discoverable=is_discoverable,
            is_shareable=is_shareable,
        )

        # Whatever keys remain were not declared fields; preserve them.
        project_settings.additional_properties = d
        return project_settings

    @property
    def additional_keys(self) -> List[str]:
        """Names of the undeclared (pass-through) properties on this model."""
        return list(self.additional_properties.keys())
Attributes:
- budget_amount (int): Total allowed cost for the budget period
- budget_period (BudgetPeriod): Time period associated with the budget amount
- dragen_ami (Union[None, Unset, str]): AMI ID for the DRAGEN compute environment (if enabled)
- enable_compute (Union[Unset, bool]): Enables the default compute environment Default: True.
- enable_dragen (Union[Unset, bool]): Enables the DRAGEN compute environment Default: False.
- enable_backup (Union[Unset, bool]): Enables the AWS Backup service for S3 Default: False.
- enable_sftp (Union[Unset, bool]): Enables access to files over SFTP Default: False.
- max_f1vcpu (Union[Unset, int]): Service quota limit for On Demand F1 instances Default: 0.
- max_spot_vcpu (Union[Unset, int]): Service quota limit for Spot instances Default: 0.
- max_gpuvcpu (Union[Unset, int]): Service quota limit for GPU Spot instances Default: 0.
- retention_policy_days (Union[Unset, int]): Days to keep deleted datasets before being permanently erased Default: 7.
- temporary_storage_lifetime_days (Union[Unset, int]): Days to keep temporary storage space (workflow executor cache) Default: 14.
- service_connections (Union[Unset, List[str]]): List of service connections to enable
- vpc_id (Union[None, Unset, str]): VPC that the compute environment will use Example: vpc-00000000000000000.
- batch_subnets (Union[List[str], None, Unset]): List of subnets that the compute environment will use Example: ['subnet-00000000000000000'].
- sagemaker_subnets (Union[List[str], None, Unset]): List of subnets that the sagemaker instances will use Example: ['subnet-00000000000000000'].
- kms_arn (Union[None, Unset, str]): KMS Key ARN to encrypt S3 objects, if not provided, default bucket encryption will be used
- is_discoverable (Union[None, Unset, bool]): Enables the project to be discoverable by other users Default: False.
- is_shareable (Union[None, Unset, bool]): Enables the project to be shared with other projects Default: False.
    def __init__(self, budget_amount, budget_period, dragen_ami=attr_dict['dragen_ami'].default, enable_compute=attr_dict['enable_compute'].default, enable_dragen=attr_dict['enable_dragen'].default, enable_backup=attr_dict['enable_backup'].default, enable_sftp=attr_dict['enable_sftp'].default, max_f1vcpu=attr_dict['max_f1vcpu'].default, max_spot_vcpu=attr_dict['max_spot_vcpu'].default, max_gpuvcpu=attr_dict['max_gpuvcpu'].default, retention_policy_days=attr_dict['retention_policy_days'].default, temporary_storage_lifetime_days=attr_dict['temporary_storage_lifetime_days'].default, service_connections=attr_dict['service_connections'].default, vpc_id=attr_dict['vpc_id'].default, batch_subnets=attr_dict['batch_subnets'].default, sagemaker_subnets=attr_dict['sagemaker_subnets'].default, kms_arn=attr_dict['kms_arn'].default, is_discoverable=attr_dict['is_discoverable'].default, is_shareable=attr_dict['is_shareable'].default):
        """Method generated by attrs for class ProjectSettings.

        Assigns every field verbatim; optional-field defaults come from the
        attrs-collected attribute definitions (``attr_dict``), so the
        declared class-body defaults (True/False/0/UNSET/...) are honored.
        """
        self.budget_amount = budget_amount
        self.budget_period = budget_period
        self.dragen_ami = dragen_ami
        self.enable_compute = enable_compute
        self.enable_dragen = enable_dragen
        self.enable_backup = enable_backup
        self.enable_sftp = enable_sftp
        self.max_f1vcpu = max_f1vcpu
        self.max_spot_vcpu = max_spot_vcpu
        self.max_gpuvcpu = max_gpuvcpu
        self.retention_policy_days = retention_policy_days
        self.temporary_storage_lifetime_days = temporary_storage_lifetime_days
        self.service_connections = service_connections
        self.vpc_id = vpc_id
        self.batch_subnets = batch_subnets
        self.sagemaker_subnets = sagemaker_subnets
        self.kms_arn = kms_arn
        self.is_discoverable = is_discoverable
        self.is_shareable = is_shareable
        # additional_properties is init=False: always created fresh via the
        # attrs-generated factory (an empty dict), never passed by callers.
        self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectSettings.
    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a dict keyed by the API's camelCase names.

        Required fields (``budgetAmount``, ``budgetPeriod``) are always
        emitted; optional fields only when not ``UNSET`` (an explicit
        ``None`` passes through). ``budget_period`` is reduced to its enum
        ``.value``. ``additional_properties`` is merged in first, so
        declared fields win on key collisions.
        """
        budget_amount = self.budget_amount

        budget_period = self.budget_period.value

        dragen_ami: Union[None, Unset, str]
        if isinstance(self.dragen_ami, Unset):
            dragen_ami = UNSET
        else:
            dragen_ami = self.dragen_ami

        enable_compute = self.enable_compute

        enable_dragen = self.enable_dragen

        enable_backup = self.enable_backup

        enable_sftp = self.enable_sftp

        max_f1vcpu = self.max_f1vcpu

        max_spot_vcpu = self.max_spot_vcpu

        max_gpuvcpu = self.max_gpuvcpu

        retention_policy_days = self.retention_policy_days

        temporary_storage_lifetime_days = self.temporary_storage_lifetime_days

        service_connections: Union[Unset, List[str]] = UNSET
        if not isinstance(self.service_connections, Unset):
            service_connections = self.service_connections

        vpc_id: Union[None, Unset, str]
        if isinstance(self.vpc_id, Unset):
            vpc_id = UNSET
        else:
            vpc_id = self.vpc_id

        # NOTE(review): the list branch and the fallback branch are
        # identical here — generated-code artifact, behavior unaffected.
        batch_subnets: Union[List[str], None, Unset]
        if isinstance(self.batch_subnets, Unset):
            batch_subnets = UNSET
        elif isinstance(self.batch_subnets, list):
            batch_subnets = self.batch_subnets

        else:
            batch_subnets = self.batch_subnets

        sagemaker_subnets: Union[List[str], None, Unset]
        if isinstance(self.sagemaker_subnets, Unset):
            sagemaker_subnets = UNSET
        elif isinstance(self.sagemaker_subnets, list):
            sagemaker_subnets = self.sagemaker_subnets

        else:
            sagemaker_subnets = self.sagemaker_subnets

        kms_arn: Union[None, Unset, str]
        if isinstance(self.kms_arn, Unset):
            kms_arn = UNSET
        else:
            kms_arn = self.kms_arn

        is_discoverable: Union[None, Unset, bool]
        if isinstance(self.is_discoverable, Unset):
            is_discoverable = UNSET
        else:
            is_discoverable = self.is_discoverable

        is_shareable: Union[None, Unset, bool]
        if isinstance(self.is_shareable, Unset):
            is_shareable = UNSET
        else:
            is_shareable = self.is_shareable

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "budgetAmount": budget_amount,
                "budgetPeriod": budget_period,
            }
        )
        if dragen_ami is not UNSET:
            field_dict["dragenAmi"] = dragen_ami
        if enable_compute is not UNSET:
            field_dict["enableCompute"] = enable_compute
        if enable_dragen is not UNSET:
            field_dict["enableDragen"] = enable_dragen
        if enable_backup is not UNSET:
            field_dict["enableBackup"] = enable_backup
        if enable_sftp is not UNSET:
            field_dict["enableSftp"] = enable_sftp
        if max_f1vcpu is not UNSET:
            field_dict["maxF1VCPU"] = max_f1vcpu
        if max_spot_vcpu is not UNSET:
            field_dict["maxSpotVCPU"] = max_spot_vcpu
        if max_gpuvcpu is not UNSET:
            field_dict["maxGPUVCPU"] = max_gpuvcpu
        if retention_policy_days is not UNSET:
            field_dict["retentionPolicyDays"] = retention_policy_days
        if temporary_storage_lifetime_days is not UNSET:
            field_dict["temporaryStorageLifetimeDays"] = temporary_storage_lifetime_days
        if service_connections is not UNSET:
            field_dict["serviceConnections"] = service_connections
        if vpc_id is not UNSET:
            field_dict["vpcId"] = vpc_id
        if batch_subnets is not UNSET:
            field_dict["batchSubnets"] = batch_subnets
        if sagemaker_subnets is not UNSET:
            field_dict["sagemakerSubnets"] = sagemaker_subnets
        if kms_arn is not UNSET:
            field_dict["kmsArn"] = kms_arn
        if is_discoverable is not UNSET:
            field_dict["isDiscoverable"] = is_discoverable
        if is_shareable is not UNSET:
            field_dict["isShareable"] = is_shareable

        return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Build a model instance from an API payload dict.

    Required keys (``budgetAmount``, ``budgetPeriod``) raise ``KeyError``
    when missing; optional keys default to ``UNSET``.  Keys not consumed
    here are preserved in ``additional_properties``.
    """
    d = src_dict.copy()
    budget_amount = d.pop("budgetAmount")

    budget_period = BudgetPeriod(d.pop("budgetPeriod"))

    def _parse_dragen_ami(data: object) -> Union[None, Unset, str]:
        # None/UNSET pass through; anything else is taken as the str member.
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(Union[None, Unset, str], data)

    dragen_ami = _parse_dragen_ami(d.pop("dragenAmi", UNSET))

    enable_compute = d.pop("enableCompute", UNSET)

    enable_dragen = d.pop("enableDragen", UNSET)

    enable_backup = d.pop("enableBackup", UNSET)

    enable_sftp = d.pop("enableSftp", UNSET)

    max_f1vcpu = d.pop("maxF1VCPU", UNSET)

    max_spot_vcpu = d.pop("maxSpotVCPU", UNSET)

    max_gpuvcpu = d.pop("maxGPUVCPU", UNSET)

    retention_policy_days = d.pop("retentionPolicyDays", UNSET)

    temporary_storage_lifetime_days = d.pop("temporaryStorageLifetimeDays", UNSET)

    service_connections = cast(List[str], d.pop("serviceConnections", UNSET))

    def _parse_vpc_id(data: object) -> Union[None, Unset, str]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(Union[None, Unset, str], data)

    vpc_id = _parse_vpc_id(d.pop("vpcId", UNSET))

    def _parse_batch_subnets(data: object) -> Union[List[str], None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            # Try the List[str] union member first; fall through otherwise.
            if not isinstance(data, list):
                raise TypeError()
            batch_subnets_type_0 = cast(List[str], data)

            return batch_subnets_type_0
        except:  # noqa: E722
            pass
        return cast(Union[List[str], None, Unset], data)

    batch_subnets = _parse_batch_subnets(d.pop("batchSubnets", UNSET))

    def _parse_sagemaker_subnets(data: object) -> Union[List[str], None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, list):
                raise TypeError()
            sagemaker_subnets_type_0 = cast(List[str], data)

            return sagemaker_subnets_type_0
        except:  # noqa: E722
            pass
        return cast(Union[List[str], None, Unset], data)

    sagemaker_subnets = _parse_sagemaker_subnets(d.pop("sagemakerSubnets", UNSET))

    def _parse_kms_arn(data: object) -> Union[None, Unset, str]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(Union[None, Unset, str], data)

    kms_arn = _parse_kms_arn(d.pop("kmsArn", UNSET))

    def _parse_is_discoverable(data: object) -> Union[None, Unset, bool]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(Union[None, Unset, bool], data)

    is_discoverable = _parse_is_discoverable(d.pop("isDiscoverable", UNSET))

    def _parse_is_shareable(data: object) -> Union[None, Unset, bool]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(Union[None, Unset, bool], data)

    is_shareable = _parse_is_shareable(d.pop("isShareable", UNSET))

    project_settings = cls(
        budget_amount=budget_amount,
        budget_period=budget_period,
        dragen_ami=dragen_ami,
        enable_compute=enable_compute,
        enable_dragen=enable_dragen,
        enable_backup=enable_backup,
        enable_sftp=enable_sftp,
        max_f1vcpu=max_f1vcpu,
        max_spot_vcpu=max_spot_vcpu,
        max_gpuvcpu=max_gpuvcpu,
        retention_policy_days=retention_policy_days,
        temporary_storage_lifetime_days=temporary_storage_lifetime_days,
        service_connections=service_connections,
        vpc_id=vpc_id,
        batch_subnets=batch_subnets,
        sagemaker_subnets=sagemaker_subnets,
        kms_arn=kms_arn,
        is_discoverable=is_discoverable,
        is_shareable=is_shareable,
    )

    # Whatever remains after the pops becomes additional_properties.
    project_settings.additional_properties = d
    return project_settings
12@_attrs_define 13class ProjectUser: 14 """ 15 Attributes: 16 name (str): 17 username (str): 18 organization (str): 19 department (str): 20 email (str): 21 role (ProjectRole): 22 """ 23 24 name: str 25 username: str 26 organization: str 27 department: str 28 email: str 29 role: ProjectRole 30 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 31 32 def to_dict(self) -> Dict[str, Any]: 33 name = self.name 34 35 username = self.username 36 37 organization = self.organization 38 39 department = self.department 40 41 email = self.email 42 43 role = self.role.value 44 45 field_dict: Dict[str, Any] = {} 46 field_dict.update(self.additional_properties) 47 field_dict.update( 48 { 49 "name": name, 50 "username": username, 51 "organization": organization, 52 "department": department, 53 "email": email, 54 "role": role, 55 } 56 ) 57 58 return field_dict 59 60 @classmethod 61 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 62 d = src_dict.copy() 63 name = d.pop("name") 64 65 username = d.pop("username") 66 67 organization = d.pop("organization") 68 69 department = d.pop("department") 70 71 email = d.pop("email") 72 73 role = ProjectRole(d.pop("role")) 74 75 project_user = cls( 76 name=name, 77 username=username, 78 organization=organization, 79 department=department, 80 email=email, 81 role=role, 82 ) 83 84 project_user.additional_properties = d 85 return project_user 86 87 @property 88 def additional_keys(self) -> List[str]: 89 return list(self.additional_properties.keys())
Attributes:
- name (str):
- username (str):
- organization (str):
- department (str):
- email (str):
- role (ProjectRole):
29def __init__(self, name, username, organization, department, email, role): 30 self.name = name 31 self.username = username 32 self.organization = organization 33 self.department = department 34 self.email = email 35 self.role = role 36 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ProjectUser.
32 def to_dict(self) -> Dict[str, Any]: 33 name = self.name 34 35 username = self.username 36 37 organization = self.organization 38 39 department = self.department 40 41 email = self.email 42 43 role = self.role.value 44 45 field_dict: Dict[str, Any] = {} 46 field_dict.update(self.additional_properties) 47 field_dict.update( 48 { 49 "name": name, 50 "username": username, 51 "organization": organization, 52 "department": department, 53 "email": email, 54 "role": role, 55 } 56 ) 57 58 return field_dict
60 @classmethod 61 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 62 d = src_dict.copy() 63 name = d.pop("name") 64 65 username = d.pop("username") 66 67 organization = d.pop("organization") 68 69 department = d.pop("department") 70 71 email = d.pop("email") 72 73 role = ProjectRole(d.pop("role")) 74 75 project_user = cls( 76 name=name, 77 username=username, 78 organization=organization, 79 department=department, 80 email=email, 81 role=role, 82 ) 83 84 project_user.additional_properties = d 85 return project_user
16@_attrs_define 17class Reference: 18 """ 19 Attributes: 20 id (str): 21 name (str): 22 description (str): 23 type (str): 24 files (List['FileEntry']): 25 created_by (str): 26 created_at (datetime.datetime): 27 """ 28 29 id: str 30 name: str 31 description: str 32 type: str 33 files: List["FileEntry"] 34 created_by: str 35 created_at: datetime.datetime 36 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 37 38 def to_dict(self) -> Dict[str, Any]: 39 id = self.id 40 41 name = self.name 42 43 description = self.description 44 45 type = self.type 46 47 files = [] 48 for files_item_data in self.files: 49 files_item = files_item_data.to_dict() 50 files.append(files_item) 51 52 created_by = self.created_by 53 54 created_at = self.created_at.isoformat() 55 56 field_dict: Dict[str, Any] = {} 57 field_dict.update(self.additional_properties) 58 field_dict.update( 59 { 60 "id": id, 61 "name": name, 62 "description": description, 63 "type": type, 64 "files": files, 65 "createdBy": created_by, 66 "createdAt": created_at, 67 } 68 ) 69 70 return field_dict 71 72 @classmethod 73 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 74 from ..models.file_entry import FileEntry 75 76 d = src_dict.copy() 77 id = d.pop("id") 78 79 name = d.pop("name") 80 81 description = d.pop("description") 82 83 type = d.pop("type") 84 85 files = [] 86 _files = d.pop("files") 87 for files_item_data in _files: 88 files_item = FileEntry.from_dict(files_item_data) 89 90 files.append(files_item) 91 92 created_by = d.pop("createdBy") 93 94 created_at = isoparse(d.pop("createdAt")) 95 96 reference = cls( 97 id=id, 98 name=name, 99 description=description, 100 type=type, 101 files=files, 102 created_by=created_by, 103 created_at=created_at, 104 ) 105 106 reference.additional_properties = d 107 return reference 108 109 @property 110 def additional_keys(self) -> List[str]: 111 return list(self.additional_properties.keys())
Attributes:
- id (str):
- name (str):
- description (str):
- type (str):
- files (List['FileEntry']):
- created_by (str):
- created_at (datetime.datetime):
30def __init__(self, id, name, description, type, files, created_by, created_at): 31 self.id = id 32 self.name = name 33 self.description = description 34 self.type = type 35 self.files = files 36 self.created_by = created_by 37 self.created_at = created_at 38 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Reference.
38 def to_dict(self) -> Dict[str, Any]: 39 id = self.id 40 41 name = self.name 42 43 description = self.description 44 45 type = self.type 46 47 files = [] 48 for files_item_data in self.files: 49 files_item = files_item_data.to_dict() 50 files.append(files_item) 51 52 created_by = self.created_by 53 54 created_at = self.created_at.isoformat() 55 56 field_dict: Dict[str, Any] = {} 57 field_dict.update(self.additional_properties) 58 field_dict.update( 59 { 60 "id": id, 61 "name": name, 62 "description": description, 63 "type": type, 64 "files": files, 65 "createdBy": created_by, 66 "createdAt": created_at, 67 } 68 ) 69 70 return field_dict
72 @classmethod 73 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 74 from ..models.file_entry import FileEntry 75 76 d = src_dict.copy() 77 id = d.pop("id") 78 79 name = d.pop("name") 80 81 description = d.pop("description") 82 83 type = d.pop("type") 84 85 files = [] 86 _files = d.pop("files") 87 for files_item_data in _files: 88 files_item = FileEntry.from_dict(files_item_data) 89 90 files.append(files_item) 91 92 created_by = d.pop("createdBy") 93 94 created_at = isoparse(d.pop("createdAt")) 95 96 reference = cls( 97 id=id, 98 name=name, 99 description=description, 100 type=type, 101 files=files, 102 created_by=created_by, 103 created_at=created_at, 104 ) 105 106 reference.additional_properties = d 107 return reference
14@_attrs_define 15class ReferenceType: 16 """ 17 Attributes: 18 name (str): 19 description (str): 20 directory (str): 21 validation (List['ReferenceTypeValidationItem']): 22 """ 23 24 name: str 25 description: str 26 directory: str 27 validation: List["ReferenceTypeValidationItem"] 28 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 29 30 def to_dict(self) -> Dict[str, Any]: 31 name = self.name 32 33 description = self.description 34 35 directory = self.directory 36 37 validation = [] 38 for validation_item_data in self.validation: 39 validation_item = validation_item_data.to_dict() 40 validation.append(validation_item) 41 42 field_dict: Dict[str, Any] = {} 43 field_dict.update(self.additional_properties) 44 field_dict.update( 45 { 46 "name": name, 47 "description": description, 48 "directory": directory, 49 "validation": validation, 50 } 51 ) 52 53 return field_dict 54 55 @classmethod 56 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 57 from ..models.reference_type_validation_item import ReferenceTypeValidationItem 58 59 d = src_dict.copy() 60 name = d.pop("name") 61 62 description = d.pop("description") 63 64 directory = d.pop("directory") 65 66 validation = [] 67 _validation = d.pop("validation") 68 for validation_item_data in _validation: 69 validation_item = ReferenceTypeValidationItem.from_dict(validation_item_data) 70 71 validation.append(validation_item) 72 73 reference_type = cls( 74 name=name, 75 description=description, 76 directory=directory, 77 validation=validation, 78 ) 79 80 reference_type.additional_properties = d 81 return reference_type 82 83 @property 84 def additional_keys(self) -> List[str]: 85 return list(self.additional_properties.keys())
Attributes:
- name (str):
- description (str):
- directory (str):
- validation (List['ReferenceTypeValidationItem']):
27def __init__(self, name, description, directory, validation): 28 self.name = name 29 self.description = description 30 self.directory = directory 31 self.validation = validation 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ReferenceType.
30 def to_dict(self) -> Dict[str, Any]: 31 name = self.name 32 33 description = self.description 34 35 directory = self.directory 36 37 validation = [] 38 for validation_item_data in self.validation: 39 validation_item = validation_item_data.to_dict() 40 validation.append(validation_item) 41 42 field_dict: Dict[str, Any] = {} 43 field_dict.update(self.additional_properties) 44 field_dict.update( 45 { 46 "name": name, 47 "description": description, 48 "directory": directory, 49 "validation": validation, 50 } 51 ) 52 53 return field_dict
55 @classmethod 56 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 57 from ..models.reference_type_validation_item import ReferenceTypeValidationItem 58 59 d = src_dict.copy() 60 name = d.pop("name") 61 62 description = d.pop("description") 63 64 directory = d.pop("directory") 65 66 validation = [] 67 _validation = d.pop("validation") 68 for validation_item_data in _validation: 69 validation_item = ReferenceTypeValidationItem.from_dict(validation_item_data) 70 71 validation.append(validation_item) 72 73 reference_type = cls( 74 name=name, 75 description=description, 76 directory=directory, 77 validation=validation, 78 ) 79 80 reference_type.additional_properties = d 81 return reference_type
10@_attrs_define 11class ReferenceTypeValidationItem: 12 """ """ 13 14 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 15 16 def to_dict(self) -> Dict[str, Any]: 17 field_dict: Dict[str, Any] = {} 18 field_dict.update(self.additional_properties) 19 field_dict.update({}) 20 21 return field_dict 22 23 @classmethod 24 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 25 d = src_dict.copy() 26 reference_type_validation_item = cls() 27 28 reference_type_validation_item.additional_properties = d 29 return reference_type_validation_item 30 31 @property 32 def additional_keys(self) -> List[str]: 33 return list(self.additional_properties.keys())
5class RepositoryType(str, Enum): 6 AWS = "AWS" 7 GITHUB_PRIVATE = "GITHUB_PRIVATE" 8 GITHUB_PUBLIC = "GITHUB_PUBLIC" 9 NONE = "NONE" 10 UNKNOWN = "UNKNOWN" 11 """ This is a fallback value for when the value is not known, do not use this value when making requests """ 12 13 def __str__(self) -> str: 14 return str(self.value) 15 16 @classmethod 17 def _missing_(cls, number): 18 return cls(cls.UNKNOWN)
str(object='') -> str
str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
5class RequestStatus(str, Enum): 6 ACCEPTED = "ACCEPTED" 7 DENIED = "DENIED" 8 PENDING = "PENDING" 9 UNKNOWN = "UNKNOWN" 10 """ This is a fallback value for when the value is not known, do not use this value when making requests """ 11 12 def __str__(self) -> str: 13 return str(self.value) 14 15 @classmethod 16 def _missing_(cls, number): 17 return cls(cls.UNKNOWN)
str(object='') -> str
str(bytes_or_buffer[, encoding[, errors]]) -> str
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(). errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class RequirementFulfillmentInput:
    """
    Attributes:
        file (Union[None, Unset, str]):
        completed_on (Union[None, Unset, datetime.datetime]): If not provided, defaults to the current instant
    """

    file: Union[None, Unset, str] = UNSET
    completed_on: Union[None, Unset, datetime.datetime] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize, omitting UNSET fields and ISO-formatting the datetime."""
        file: Union[None, Unset, str]
        file = UNSET if isinstance(self.file, Unset) else self.file

        completed_on: Union[None, Unset, str]
        if isinstance(self.completed_on, Unset):
            completed_on = UNSET
        elif isinstance(self.completed_on, datetime.datetime):
            completed_on = self.completed_on.isoformat()
        else:
            completed_on = self.completed_on

        serialized: Dict[str, Any] = dict(self.additional_properties)
        if file is not UNSET:
            serialized["file"] = file
        if completed_on is not UNSET:
            serialized["completedOn"] = completed_on

        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from an API payload; unknown keys are kept."""
        payload = src_dict.copy()

        def _parse_file(data: object) -> Union[None, Unset, str]:
            if data is None or isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        def _parse_completed_on(data: object) -> Union[None, Unset, datetime.datetime]:
            if data is None or isinstance(data, Unset):
                return data
            try:
                # Prefer the datetime union member when the value parses.
                if not isinstance(data, str):
                    raise TypeError()
                return isoparse(data)
            except:  # noqa: E722
                pass
            return cast(Union[None, Unset, datetime.datetime], data)

        requirement_fulfillment_input = cls(
            file=_parse_file(payload.pop("file", UNSET)),
            completed_on=_parse_completed_on(payload.pop("completedOn", UNSET)),
        )

        requirement_fulfillment_input.additional_properties = payload
        return requirement_fulfillment_input

    @property
    def additional_keys(self) -> List[str]:
        """Names of payload keys beyond the declared fields."""
        return list(self.additional_properties.keys())
Attributes:
- file (Union[None, Unset, str]):
- completed_on (Union[None, Unset, datetime.datetime]): If not provided, defaults to the current instant
25def __init__(self, file=attr_dict['file'].default, completed_on=attr_dict['completed_on'].default): 26 self.file = file 27 self.completed_on = completed_on 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class RequirementFulfillmentInput.
26 def to_dict(self) -> Dict[str, Any]: 27 file: Union[None, Unset, str] 28 if isinstance(self.file, Unset): 29 file = UNSET 30 else: 31 file = self.file 32 33 completed_on: Union[None, Unset, str] 34 if isinstance(self.completed_on, Unset): 35 completed_on = UNSET 36 elif isinstance(self.completed_on, datetime.datetime): 37 completed_on = self.completed_on.isoformat() 38 else: 39 completed_on = self.completed_on 40 41 field_dict: Dict[str, Any] = {} 42 field_dict.update(self.additional_properties) 43 field_dict.update({}) 44 if file is not UNSET: 45 field_dict["file"] = file 46 if completed_on is not UNSET: 47 field_dict["completedOn"] = completed_on 48 49 return field_dict
51 @classmethod 52 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 53 d = src_dict.copy() 54 55 def _parse_file(data: object) -> Union[None, Unset, str]: 56 if data is None: 57 return data 58 if isinstance(data, Unset): 59 return data 60 return cast(Union[None, Unset, str], data) 61 62 file = _parse_file(d.pop("file", UNSET)) 63 64 def _parse_completed_on(data: object) -> Union[None, Unset, datetime.datetime]: 65 if data is None: 66 return data 67 if isinstance(data, Unset): 68 return data 69 try: 70 if not isinstance(data, str): 71 raise TypeError() 72 completed_on_type_0 = isoparse(data) 73 74 return completed_on_type_0 75 except: # noqa: E722 76 pass 77 return cast(Union[None, Unset, datetime.datetime], data) 78 79 completed_on = _parse_completed_on(d.pop("completedOn", UNSET)) 80 81 requirement_fulfillment_input = cls( 82 file=file, 83 completed_on=completed_on, 84 ) 85 86 requirement_fulfillment_input.additional_properties = d 87 return requirement_fulfillment_input
@_attrs_define
class RequirementInput:
    """Input payload for creating or updating a governance requirement.

    Attributes:
        name (str):
        description (str):
        type (GovernanceType): The types of governance requirements that can be enforced
        scope (GovernanceScope): The levels at which governance requirements can be enforced
        contact_ids (List[str]):
        expiration (GovernanceExpiry):
        project_id (Union[None, Unset, str]):
        acceptance (Union[GovernanceScope, None, Unset]):
        enactment_date (Union[None, Unset, datetime.datetime]):
        supplemental_docs (Union[List['GovernanceFile'], None, Unset]):
        file (Union['GovernanceFile', None, Unset]):
        authorship (Union[GovernanceScope, None, Unset]):
        verification_method (Union[GovernanceTrainingVerification, None, Unset]):
    """

    name: str
    description: str
    type: GovernanceType
    scope: GovernanceScope
    contact_ids: List[str]
    expiration: "GovernanceExpiry"
    project_id: Union[None, Unset, str] = UNSET
    acceptance: Union[GovernanceScope, None, Unset] = UNSET
    enactment_date: Union[None, Unset, datetime.datetime] = UNSET
    supplemental_docs: Union[List["GovernanceFile"], None, Unset] = UNSET
    file: Union["GovernanceFile", None, Unset] = UNSET
    authorship: Union[GovernanceScope, None, Unset] = UNSET
    verification_method: Union[GovernanceTrainingVerification, None, Unset] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict (camelCase keys).

        Required fields are always emitted; optional union fields are
        emitted only when not UNSET, with enums as their string values,
        nested models via their own ``to_dict``, and datetimes in ISO-8601.
        """
        # Deferred import avoids a circular import between generated models.
        from ..models.governance_file import GovernanceFile

        name = self.name

        description = self.description

        type = self.type.value

        scope = self.scope.value

        contact_ids = self.contact_ids

        expiration = self.expiration.to_dict()

        project_id: Union[None, Unset, str]
        if isinstance(self.project_id, Unset):
            project_id = UNSET
        else:
            project_id = self.project_id

        acceptance: Union[None, Unset, str]
        if isinstance(self.acceptance, Unset):
            acceptance = UNSET
        elif isinstance(self.acceptance, GovernanceScope):
            acceptance = self.acceptance.value
        else:
            acceptance = self.acceptance

        enactment_date: Union[None, Unset, str]
        if isinstance(self.enactment_date, Unset):
            enactment_date = UNSET
        elif isinstance(self.enactment_date, datetime.datetime):
            enactment_date = self.enactment_date.isoformat()
        else:
            enactment_date = self.enactment_date

        supplemental_docs: Union[List[Dict[str, Any]], None, Unset]
        if isinstance(self.supplemental_docs, Unset):
            supplemental_docs = UNSET
        elif isinstance(self.supplemental_docs, list):
            supplemental_docs = []
            for supplemental_docs_type_0_item_data in self.supplemental_docs:
                supplemental_docs_type_0_item = supplemental_docs_type_0_item_data.to_dict()
                supplemental_docs.append(supplemental_docs_type_0_item)

        else:
            supplemental_docs = self.supplemental_docs

        file: Union[Dict[str, Any], None, Unset]
        if isinstance(self.file, Unset):
            file = UNSET
        elif isinstance(self.file, GovernanceFile):
            file = self.file.to_dict()
        else:
            file = self.file

        authorship: Union[None, Unset, str]
        if isinstance(self.authorship, Unset):
            authorship = UNSET
        elif isinstance(self.authorship, GovernanceScope):
            authorship = self.authorship.value
        else:
            authorship = self.authorship

        verification_method: Union[None, Unset, str]
        if isinstance(self.verification_method, Unset):
            verification_method = UNSET
        elif isinstance(self.verification_method, GovernanceTrainingVerification):
            verification_method = self.verification_method.value
        else:
            verification_method = self.verification_method

        field_dict: Dict[str, Any] = {}
        # additional_properties first, so declared fields win on collisions.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "name": name,
                "description": description,
                "type": type,
                "scope": scope,
                "contactIds": contact_ids,
                "expiration": expiration,
            }
        )
        if project_id is not UNSET:
            field_dict["projectId"] = project_id
        if acceptance is not UNSET:
            field_dict["acceptance"] = acceptance
        if enactment_date is not UNSET:
            field_dict["enactmentDate"] = enactment_date
        if supplemental_docs is not UNSET:
            field_dict["supplementalDocs"] = supplemental_docs
        if file is not UNSET:
            field_dict["file"] = file
        if authorship is not UNSET:
            field_dict["authorship"] = authorship
        if verification_method is not UNSET:
            field_dict["verificationMethod"] = verification_method

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build a RequirementInput from an API payload dict.

        Required keys raise ``KeyError`` when missing; optional union keys
        default to UNSET.  Each ``_parse_*`` helper tries the structured
        union member first and falls through to a pass-through cast.
        Leftover keys are preserved in ``additional_properties``.
        """
        # Deferred imports avoid circular imports between generated models.
        from ..models.governance_expiry import GovernanceExpiry
        from ..models.governance_file import GovernanceFile

        d = src_dict.copy()
        name = d.pop("name")

        description = d.pop("description")

        type = GovernanceType(d.pop("type"))

        scope = GovernanceScope(d.pop("scope"))

        contact_ids = cast(List[str], d.pop("contactIds"))

        expiration = GovernanceExpiry.from_dict(d.pop("expiration"))

        def _parse_project_id(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        project_id = _parse_project_id(d.pop("projectId", UNSET))

        def _parse_acceptance(data: object) -> Union[GovernanceScope, None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                acceptance_type_1 = GovernanceScope(data)

                return acceptance_type_1
            except:  # noqa: E722
                pass
            return cast(Union[GovernanceScope, None, Unset], data)

        acceptance = _parse_acceptance(d.pop("acceptance", UNSET))

        def _parse_enactment_date(data: object) -> Union[None, Unset, datetime.datetime]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                enactment_date_type_0 = isoparse(data)

                return enactment_date_type_0
            except:  # noqa: E722
                pass
            return cast(Union[None, Unset, datetime.datetime], data)

        enactment_date = _parse_enactment_date(d.pop("enactmentDate", UNSET))

        def _parse_supplemental_docs(data: object) -> Union[List["GovernanceFile"], None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                supplemental_docs_type_0 = []
                _supplemental_docs_type_0 = data
                for supplemental_docs_type_0_item_data in _supplemental_docs_type_0:
                    supplemental_docs_type_0_item = GovernanceFile.from_dict(supplemental_docs_type_0_item_data)

                    supplemental_docs_type_0.append(supplemental_docs_type_0_item)

                return supplemental_docs_type_0
            except:  # noqa: E722
                pass
            return cast(Union[List["GovernanceFile"], None, Unset], data)

        supplemental_docs = _parse_supplemental_docs(d.pop("supplementalDocs", UNSET))

        def _parse_file(data: object) -> Union["GovernanceFile", None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                file_type_1 = GovernanceFile.from_dict(data)

                return file_type_1
            except:  # noqa: E722
                pass
            return cast(Union["GovernanceFile", None, Unset], data)

        file = _parse_file(d.pop("file", UNSET))

        def _parse_authorship(data: object) -> Union[GovernanceScope, None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                authorship_type_1 = GovernanceScope(data)

                return authorship_type_1
            except:  # noqa: E722
                pass
            return cast(Union[GovernanceScope, None, Unset], data)

        authorship = _parse_authorship(d.pop("authorship", UNSET))

        def _parse_verification_method(data: object) -> Union[GovernanceTrainingVerification, None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                verification_method_type_1 = GovernanceTrainingVerification(data)

                return verification_method_type_1
            except:  # noqa: E722
                pass
            return cast(Union[GovernanceTrainingVerification, None, Unset], data)

        verification_method = _parse_verification_method(d.pop("verificationMethod", UNSET))

        requirement_input = cls(
            name=name,
            description=description,
            type=type,
            scope=scope,
            contact_ids=contact_ids,
            expiration=expiration,
            project_id=project_id,
            acceptance=acceptance,
            enactment_date=enactment_date,
            supplemental_docs=supplemental_docs,
            file=file,
            authorship=authorship,
            verification_method=verification_method,
        )

        requirement_input.additional_properties = d
        return requirement_input

    @property
    def additional_keys(self) -> List[str]:
        """Names of payload keys beyond the declared fields."""
        return list(self.additional_properties.keys())
Attributes:
- name (str):
- description (str):
- type (GovernanceType): The types of governance requirements that can be enforced
- scope (GovernanceScope): The levels at which governance requirements can be enforced
- contact_ids (List[str]):
- expiration (GovernanceExpiry):
- project_id (Union[None, Unset, str]):
- acceptance (Union[GovernanceScope, None, Unset]):
- enactment_date (Union[None, Unset, datetime.datetime]):
- supplemental_docs (Union[List['GovernanceFile'], None, Unset]):
- file (Union['GovernanceFile', None, Unset]):
- authorship (Union[GovernanceScope, None, Unset]):
- verification_method (Union[GovernanceTrainingVerification, None, Unset]):
36def __init__(self, name, description, type, scope, contact_ids, expiration, project_id=attr_dict['project_id'].default, acceptance=attr_dict['acceptance'].default, enactment_date=attr_dict['enactment_date'].default, supplemental_docs=attr_dict['supplemental_docs'].default, file=attr_dict['file'].default, authorship=attr_dict['authorship'].default, verification_method=attr_dict['verification_method'].default): 37 self.name = name 38 self.description = description 39 self.type = type 40 self.scope = scope 41 self.contact_ids = contact_ids 42 self.expiration = expiration 43 self.project_id = project_id 44 self.acceptance = acceptance 45 self.enactment_date = enactment_date 46 self.supplemental_docs = supplemental_docs 47 self.file = file 48 self.authorship = authorship 49 self.verification_method = verification_method 50 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class RequirementInput.
def to_dict(self) -> Dict[str, Any]:
    """Serialize this RequirementInput to a JSON-ready dict (camelCase keys).

    Required fields are always emitted; optional union fields only when not
    UNSET — enums as string values, nested models via ``to_dict``,
    datetimes in ISO-8601, None as JSON null.
    """
    # Deferred import avoids a circular import between generated models.
    from ..models.governance_file import GovernanceFile

    name = self.name

    description = self.description

    type = self.type.value

    scope = self.scope.value

    contact_ids = self.contact_ids

    expiration = self.expiration.to_dict()

    project_id: Union[None, Unset, str]
    if isinstance(self.project_id, Unset):
        project_id = UNSET
    else:
        project_id = self.project_id

    acceptance: Union[None, Unset, str]
    if isinstance(self.acceptance, Unset):
        acceptance = UNSET
    elif isinstance(self.acceptance, GovernanceScope):
        acceptance = self.acceptance.value
    else:
        acceptance = self.acceptance

    enactment_date: Union[None, Unset, str]
    if isinstance(self.enactment_date, Unset):
        enactment_date = UNSET
    elif isinstance(self.enactment_date, datetime.datetime):
        enactment_date = self.enactment_date.isoformat()
    else:
        enactment_date = self.enactment_date

    supplemental_docs: Union[List[Dict[str, Any]], None, Unset]
    if isinstance(self.supplemental_docs, Unset):
        supplemental_docs = UNSET
    elif isinstance(self.supplemental_docs, list):
        supplemental_docs = []
        for supplemental_docs_type_0_item_data in self.supplemental_docs:
            supplemental_docs_type_0_item = supplemental_docs_type_0_item_data.to_dict()
            supplemental_docs.append(supplemental_docs_type_0_item)

    else:
        supplemental_docs = self.supplemental_docs

    file: Union[Dict[str, Any], None, Unset]
    if isinstance(self.file, Unset):
        file = UNSET
    elif isinstance(self.file, GovernanceFile):
        file = self.file.to_dict()
    else:
        file = self.file

    authorship: Union[None, Unset, str]
    if isinstance(self.authorship, Unset):
        authorship = UNSET
    elif isinstance(self.authorship, GovernanceScope):
        authorship = self.authorship.value
    else:
        authorship = self.authorship

    verification_method: Union[None, Unset, str]
    if isinstance(self.verification_method, Unset):
        verification_method = UNSET
    elif isinstance(self.verification_method, GovernanceTrainingVerification):
        verification_method = self.verification_method.value
    else:
        verification_method = self.verification_method

    field_dict: Dict[str, Any] = {}
    # additional_properties first, so declared fields win on collisions.
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "name": name,
            "description": description,
            "type": type,
            "scope": scope,
            "contactIds": contact_ids,
            "expiration": expiration,
        }
    )
    if project_id is not UNSET:
        field_dict["projectId"] = project_id
    if acceptance is not UNSET:
        field_dict["acceptance"] = acceptance
    if enactment_date is not UNSET:
        field_dict["enactmentDate"] = enactment_date
    if supplemental_docs is not UNSET:
        field_dict["supplementalDocs"] = supplemental_docs
    if file is not UNSET:
        field_dict["file"] = file
    if authorship is not UNSET:
        field_dict["authorship"] = authorship
    if verification_method is not UNSET:
        field_dict["verificationMethod"] = verification_method

    return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize a RequirementInput from a camelCase dict.

    Each optional field uses a local _parse_* helper: None and UNSET pass
    through unchanged, a typed parse is attempted, and on any failure the raw
    value is returned as-is. Keys not consumed by named fields are collected
    into additional_properties.
    """
    from ..models.governance_expiry import GovernanceExpiry
    from ..models.governance_file import GovernanceFile

    d = src_dict.copy()
    name = d.pop("name")

    description = d.pop("description")

    # Required enum fields raise ValueError on unknown values.
    type = GovernanceType(d.pop("type"))

    scope = GovernanceScope(d.pop("scope"))

    contact_ids = cast(List[str], d.pop("contactIds"))

    expiration = GovernanceExpiry.from_dict(d.pop("expiration"))

    def _parse_project_id(data: object) -> Union[None, Unset, str]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(Union[None, Unset, str], data)

    project_id = _parse_project_id(d.pop("projectId", UNSET))

    def _parse_acceptance(data: object) -> Union[GovernanceScope, None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            acceptance_type_1 = GovernanceScope(data)

            return acceptance_type_1
        except:  # noqa: E722
            pass
        return cast(Union[GovernanceScope, None, Unset], data)

    acceptance = _parse_acceptance(d.pop("acceptance", UNSET))

    def _parse_enactment_date(data: object) -> Union[None, Unset, datetime.datetime]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            # ISO-8601 string -> datetime; non-parsable values fall through raw.
            enactment_date_type_0 = isoparse(data)

            return enactment_date_type_0
        except:  # noqa: E722
            pass
        return cast(Union[None, Unset, datetime.datetime], data)

    enactment_date = _parse_enactment_date(d.pop("enactmentDate", UNSET))

    def _parse_supplemental_docs(data: object) -> Union[List["GovernanceFile"], None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, list):
                raise TypeError()
            supplemental_docs_type_0 = []
            _supplemental_docs_type_0 = data
            for supplemental_docs_type_0_item_data in _supplemental_docs_type_0:
                supplemental_docs_type_0_item = GovernanceFile.from_dict(supplemental_docs_type_0_item_data)

                supplemental_docs_type_0.append(supplemental_docs_type_0_item)

            return supplemental_docs_type_0
        except:  # noqa: E722
            pass
        return cast(Union[List["GovernanceFile"], None, Unset], data)

    supplemental_docs = _parse_supplemental_docs(d.pop("supplementalDocs", UNSET))

    def _parse_file(data: object) -> Union["GovernanceFile", None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, dict):
                raise TypeError()
            file_type_1 = GovernanceFile.from_dict(data)

            return file_type_1
        except:  # noqa: E722
            pass
        return cast(Union["GovernanceFile", None, Unset], data)

    file = _parse_file(d.pop("file", UNSET))

    def _parse_authorship(data: object) -> Union[GovernanceScope, None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            authorship_type_1 = GovernanceScope(data)

            return authorship_type_1
        except:  # noqa: E722
            pass
        return cast(Union[GovernanceScope, None, Unset], data)

    authorship = _parse_authorship(d.pop("authorship", UNSET))

    def _parse_verification_method(data: object) -> Union[GovernanceTrainingVerification, None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, str):
                raise TypeError()
            verification_method_type_1 = GovernanceTrainingVerification(data)

            return verification_method_type_1
        except:  # noqa: E722
            pass
        return cast(Union[GovernanceTrainingVerification, None, Unset], data)

    verification_method = _parse_verification_method(d.pop("verificationMethod", UNSET))

    requirement_input = cls(
        name=name,
        description=description,
        type=type,
        scope=scope,
        contact_ids=contact_ids,
        expiration=expiration,
        project_id=project_id,
        acceptance=acceptance,
        enactment_date=enactment_date,
        supplemental_docs=supplemental_docs,
        file=file,
        authorship=authorship,
        verification_method=verification_method,
    )

    # Whatever remains in d are unmodeled keys.
    requirement_input.additional_properties = d
    return requirement_input
12@_attrs_define 13class ResourcesInfo: 14 """ 15 Attributes: 16 commit (str): 17 date (datetime.datetime): 18 repository (str): 19 source_version (str): 20 """ 21 22 commit: str 23 date: datetime.datetime 24 repository: str 25 source_version: str 26 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 27 28 def to_dict(self) -> Dict[str, Any]: 29 commit = self.commit 30 31 date = self.date.isoformat() 32 33 repository = self.repository 34 35 source_version = self.source_version 36 37 field_dict: Dict[str, Any] = {} 38 field_dict.update(self.additional_properties) 39 field_dict.update( 40 { 41 "commit": commit, 42 "date": date, 43 "repository": repository, 44 "sourceVersion": source_version, 45 } 46 ) 47 48 return field_dict 49 50 @classmethod 51 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 52 d = src_dict.copy() 53 commit = d.pop("commit") 54 55 date = isoparse(d.pop("date")) 56 57 repository = d.pop("repository") 58 59 source_version = d.pop("sourceVersion") 60 61 resources_info = cls( 62 commit=commit, 63 date=date, 64 repository=repository, 65 source_version=source_version, 66 ) 67 68 resources_info.additional_properties = d 69 return resources_info 70 71 @property 72 def additional_keys(self) -> List[str]: 73 return list(self.additional_properties.keys())
Attributes:
- commit (str):
- date (datetime.datetime):
- repository (str):
- source_version (str):
27def __init__(self, commit, date, repository, source_version): 28 self.commit = commit 29 self.date = date 30 self.repository = repository 31 self.source_version = source_version 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ResourcesInfo.
28 def to_dict(self) -> Dict[str, Any]: 29 commit = self.commit 30 31 date = self.date.isoformat() 32 33 repository = self.repository 34 35 source_version = self.source_version 36 37 field_dict: Dict[str, Any] = {} 38 field_dict.update(self.additional_properties) 39 field_dict.update( 40 { 41 "commit": commit, 42 "date": date, 43 "repository": repository, 44 "sourceVersion": source_version, 45 } 46 ) 47 48 return field_dict
50 @classmethod 51 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 52 d = src_dict.copy() 53 commit = d.pop("commit") 54 55 date = isoparse(d.pop("date")) 56 57 repository = d.pop("repository") 58 59 source_version = d.pop("sourceVersion") 60 61 resources_info = cls( 62 commit=commit, 63 date=date, 64 repository=repository, 65 source_version=source_version, 66 ) 67 68 resources_info.additional_properties = d 69 return resources_info
@_attrs_define
class RunAnalysisRequest:
    """Request payload for launching a workflow analysis.

    Attributes:
        name (str): Name of the dataset
        process_id (str): Process ID of the workflow Example: process-nf-core-rnaseq-3_8.
        source_dataset_ids (List[str]): These datasets contain files that are inputs to this workflow.
        params (RunAnalysisRequestParams): Parameters used in workflow (can be empty)
        notification_emails (List[str]): Emails to notify upon workflow success or failure
        description (Union[None, Unset, str]): Description of the dataset (optional)
        source_sample_ids (Union[List[str], None, Unset]): Samples within the source datasets that will be used as
            inputs to this workflow. If not specified, all samples will be used.
        source_sample_files_map (Union['RunAnalysisRequestSourceSampleFilesMap', None, Unset]): Files containing samples
            used to define source data input to this workflow. If not specified, all files will be used. Keys are
            sampleIds, and the lists are file paths to include.
        resume_dataset_id (Union[None, Unset, str]): Used for caching task execution. If the parameters are the same as
            the dataset specified here, it will re-use the output to minimize duplicate work
        compute_environment_id (Union[None, Unset, str]): The compute environment where to run the workflow, if not
            specified, it will run in AWS
    """

    name: str
    process_id: str
    source_dataset_ids: List[str]
    params: "RunAnalysisRequestParams"
    notification_emails: List[str]
    description: Union[None, Unset, str] = UNSET
    source_sample_ids: Union[List[str], None, Unset] = UNSET
    source_sample_files_map: Union["RunAnalysisRequestSourceSampleFilesMap", None, Unset] = UNSET
    resume_dataset_id: Union[None, Unset, str] = UNSET
    compute_environment_id: Union[None, Unset, str] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict with camelCase keys; UNSET optional fields are omitted."""
        from ..models.run_analysis_request_source_sample_files_map import RunAnalysisRequestSourceSampleFilesMap

        name = self.name

        process_id = self.process_id

        source_dataset_ids = self.source_dataset_ids

        params = self.params.to_dict()

        notification_emails = self.notification_emails

        # Optional fields: the UNSET sentinel is preserved so the key can be
        # skipped below; None passes through so an explicit null is emitted.
        description: Union[None, Unset, str]
        if isinstance(self.description, Unset):
            description = UNSET
        else:
            description = self.description

        source_sample_ids: Union[List[str], None, Unset]
        if isinstance(self.source_sample_ids, Unset):
            source_sample_ids = UNSET
        elif isinstance(self.source_sample_ids, list):
            source_sample_ids = self.source_sample_ids

        else:
            source_sample_ids = self.source_sample_ids

        source_sample_files_map: Union[Dict[str, Any], None, Unset]
        if isinstance(self.source_sample_files_map, Unset):
            source_sample_files_map = UNSET
        elif isinstance(self.source_sample_files_map, RunAnalysisRequestSourceSampleFilesMap):
            source_sample_files_map = self.source_sample_files_map.to_dict()
        else:
            source_sample_files_map = self.source_sample_files_map

        resume_dataset_id: Union[None, Unset, str]
        if isinstance(self.resume_dataset_id, Unset):
            resume_dataset_id = UNSET
        else:
            resume_dataset_id = self.resume_dataset_id

        compute_environment_id: Union[None, Unset, str]
        if isinstance(self.compute_environment_id, Unset):
            compute_environment_id = UNSET
        else:
            compute_environment_id = self.compute_environment_id

        field_dict: Dict[str, Any] = {}
        # additional_properties first so the modeled keys below win on collision.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "name": name,
                "processId": process_id,
                "sourceDatasetIds": source_dataset_ids,
                "params": params,
                "notificationEmails": notification_emails,
            }
        )
        if description is not UNSET:
            field_dict["description"] = description
        if source_sample_ids is not UNSET:
            field_dict["sourceSampleIds"] = source_sample_ids
        if source_sample_files_map is not UNSET:
            field_dict["sourceSampleFilesMap"] = source_sample_files_map
        if resume_dataset_id is not UNSET:
            field_dict["resumeDatasetId"] = resume_dataset_id
        if compute_environment_id is not UNSET:
            field_dict["computeEnvironmentId"] = compute_environment_id

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a camelCase dict; unmodeled keys land in additional_properties.

        Each _parse_* helper passes None/UNSET through, attempts a typed parse,
        and falls back to the raw value on failure.
        """
        from ..models.run_analysis_request_params import RunAnalysisRequestParams
        from ..models.run_analysis_request_source_sample_files_map import RunAnalysisRequestSourceSampleFilesMap

        d = src_dict.copy()
        name = d.pop("name")

        process_id = d.pop("processId")

        source_dataset_ids = cast(List[str], d.pop("sourceDatasetIds"))

        params = RunAnalysisRequestParams.from_dict(d.pop("params"))

        notification_emails = cast(List[str], d.pop("notificationEmails"))

        def _parse_description(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        description = _parse_description(d.pop("description", UNSET))

        def _parse_source_sample_ids(data: object) -> Union[List[str], None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                source_sample_ids_type_0 = cast(List[str], data)

                return source_sample_ids_type_0
            except:  # noqa: E722
                pass
            return cast(Union[List[str], None, Unset], data)

        source_sample_ids = _parse_source_sample_ids(d.pop("sourceSampleIds", UNSET))

        def _parse_source_sample_files_map(
            data: object,
        ) -> Union["RunAnalysisRequestSourceSampleFilesMap", None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                source_sample_files_map_type_0 = RunAnalysisRequestSourceSampleFilesMap.from_dict(data)

                return source_sample_files_map_type_0
            except:  # noqa: E722
                pass
            return cast(Union["RunAnalysisRequestSourceSampleFilesMap", None, Unset], data)

        source_sample_files_map = _parse_source_sample_files_map(d.pop("sourceSampleFilesMap", UNSET))

        def _parse_resume_dataset_id(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        resume_dataset_id = _parse_resume_dataset_id(d.pop("resumeDatasetId", UNSET))

        def _parse_compute_environment_id(data: object) -> Union[None, Unset, str]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            return cast(Union[None, Unset, str], data)

        compute_environment_id = _parse_compute_environment_id(d.pop("computeEnvironmentId", UNSET))

        run_analysis_request = cls(
            name=name,
            process_id=process_id,
            source_dataset_ids=source_dataset_ids,
            params=params,
            notification_emails=notification_emails,
            description=description,
            source_sample_ids=source_sample_ids,
            source_sample_files_map=source_sample_files_map,
            resume_dataset_id=resume_dataset_id,
            compute_environment_id=compute_environment_id,
        )

        run_analysis_request.additional_properties = d
        return run_analysis_request

    @property
    def additional_keys(self) -> List[str]:
        # Keys held in additional_properties (not modeled as attributes).
        return list(self.additional_properties.keys())
Attributes:
- name (str): Name of the dataset
- process_id (str): Process ID of the workflow. Example: process-nf-core-rnaseq-3_8.
- source_dataset_ids (List[str]): These datasets contain files that are inputs to this workflow.
- params (RunAnalysisRequestParams): Parameters used in workflow (can be empty)
- notification_emails (List[str]): Emails to notify upon workflow success or failure
- description (Union[None, Unset, str]): Description of the dataset (optional)
- source_sample_ids (Union[List[str], None, Unset]): Samples within the source datasets that will be used as inputs to this workflow. If not specified, all samples will be used.
- source_sample_files_map (Union['RunAnalysisRequestSourceSampleFilesMap', None, Unset]): Files containing samples used to define source data input to this workflow. If not specified, all files will be used. Keys are sampleIds, and the lists are file paths to include.
- resume_dataset_id (Union[None, Unset, str]): Used for caching task execution. If the parameters are the same as the dataset specified here, it will re-use the output to minimize duplicate work
- compute_environment_id (Union[None, Unset, str]): The compute environment in which to run the workflow; if not specified, it will run in AWS
33def __init__(self, name, process_id, source_dataset_ids, params, notification_emails, description=attr_dict['description'].default, source_sample_ids=attr_dict['source_sample_ids'].default, source_sample_files_map=attr_dict['source_sample_files_map'].default, resume_dataset_id=attr_dict['resume_dataset_id'].default, compute_environment_id=attr_dict['compute_environment_id'].default): 34 self.name = name 35 self.process_id = process_id 36 self.source_dataset_ids = source_dataset_ids 37 self.params = params 38 self.notification_emails = notification_emails 39 self.description = description 40 self.source_sample_ids = source_sample_ids 41 self.source_sample_files_map = source_sample_files_map 42 self.resume_dataset_id = resume_dataset_id 43 self.compute_environment_id = compute_environment_id 44 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class RunAnalysisRequest.
def to_dict(self) -> Dict[str, Any]:
    """Serialize this RunAnalysisRequest to a camelCase dict; UNSET optional fields are omitted."""
    from ..models.run_analysis_request_source_sample_files_map import RunAnalysisRequestSourceSampleFilesMap

    name = self.name

    process_id = self.process_id

    source_dataset_ids = self.source_dataset_ids

    params = self.params.to_dict()

    notification_emails = self.notification_emails

    # Optional fields: the UNSET sentinel is preserved so the key can be
    # skipped below; None passes through so an explicit null is emitted.
    description: Union[None, Unset, str]
    if isinstance(self.description, Unset):
        description = UNSET
    else:
        description = self.description

    source_sample_ids: Union[List[str], None, Unset]
    if isinstance(self.source_sample_ids, Unset):
        source_sample_ids = UNSET
    elif isinstance(self.source_sample_ids, list):
        source_sample_ids = self.source_sample_ids

    else:
        source_sample_ids = self.source_sample_ids

    source_sample_files_map: Union[Dict[str, Any], None, Unset]
    if isinstance(self.source_sample_files_map, Unset):
        source_sample_files_map = UNSET
    elif isinstance(self.source_sample_files_map, RunAnalysisRequestSourceSampleFilesMap):
        source_sample_files_map = self.source_sample_files_map.to_dict()
    else:
        source_sample_files_map = self.source_sample_files_map

    resume_dataset_id: Union[None, Unset, str]
    if isinstance(self.resume_dataset_id, Unset):
        resume_dataset_id = UNSET
    else:
        resume_dataset_id = self.resume_dataset_id

    compute_environment_id: Union[None, Unset, str]
    if isinstance(self.compute_environment_id, Unset):
        compute_environment_id = UNSET
    else:
        compute_environment_id = self.compute_environment_id

    field_dict: Dict[str, Any] = {}
    # additional_properties first so the modeled keys below win on collision.
    field_dict.update(self.additional_properties)
    field_dict.update(
        {
            "name": name,
            "processId": process_id,
            "sourceDatasetIds": source_dataset_ids,
            "params": params,
            "notificationEmails": notification_emails,
        }
    )
    if description is not UNSET:
        field_dict["description"] = description
    if source_sample_ids is not UNSET:
        field_dict["sourceSampleIds"] = source_sample_ids
    if source_sample_files_map is not UNSET:
        field_dict["sourceSampleFilesMap"] = source_sample_files_map
    if resume_dataset_id is not UNSET:
        field_dict["resumeDatasetId"] = resume_dataset_id
    if compute_environment_id is not UNSET:
        field_dict["computeEnvironmentId"] = compute_environment_id

    return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize a RunAnalysisRequest from a camelCase dict.

    Each _parse_* helper passes None/UNSET through, attempts a typed parse,
    and falls back to the raw value on failure. Keys not consumed by named
    fields are collected into additional_properties.
    """
    from ..models.run_analysis_request_params import RunAnalysisRequestParams
    from ..models.run_analysis_request_source_sample_files_map import RunAnalysisRequestSourceSampleFilesMap

    d = src_dict.copy()
    name = d.pop("name")

    process_id = d.pop("processId")

    source_dataset_ids = cast(List[str], d.pop("sourceDatasetIds"))

    params = RunAnalysisRequestParams.from_dict(d.pop("params"))

    notification_emails = cast(List[str], d.pop("notificationEmails"))

    def _parse_description(data: object) -> Union[None, Unset, str]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(Union[None, Unset, str], data)

    description = _parse_description(d.pop("description", UNSET))

    def _parse_source_sample_ids(data: object) -> Union[List[str], None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, list):
                raise TypeError()
            source_sample_ids_type_0 = cast(List[str], data)

            return source_sample_ids_type_0
        except:  # noqa: E722
            pass
        return cast(Union[List[str], None, Unset], data)

    source_sample_ids = _parse_source_sample_ids(d.pop("sourceSampleIds", UNSET))

    def _parse_source_sample_files_map(
        data: object,
    ) -> Union["RunAnalysisRequestSourceSampleFilesMap", None, Unset]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        try:
            if not isinstance(data, dict):
                raise TypeError()
            source_sample_files_map_type_0 = RunAnalysisRequestSourceSampleFilesMap.from_dict(data)

            return source_sample_files_map_type_0
        except:  # noqa: E722
            pass
        return cast(Union["RunAnalysisRequestSourceSampleFilesMap", None, Unset], data)

    source_sample_files_map = _parse_source_sample_files_map(d.pop("sourceSampleFilesMap", UNSET))

    def _parse_resume_dataset_id(data: object) -> Union[None, Unset, str]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(Union[None, Unset, str], data)

    resume_dataset_id = _parse_resume_dataset_id(d.pop("resumeDatasetId", UNSET))

    def _parse_compute_environment_id(data: object) -> Union[None, Unset, str]:
        if data is None:
            return data
        if isinstance(data, Unset):
            return data
        return cast(Union[None, Unset, str], data)

    compute_environment_id = _parse_compute_environment_id(d.pop("computeEnvironmentId", UNSET))

    run_analysis_request = cls(
        name=name,
        process_id=process_id,
        source_dataset_ids=source_dataset_ids,
        params=params,
        notification_emails=notification_emails,
        description=description,
        source_sample_ids=source_sample_ids,
        source_sample_files_map=source_sample_files_map,
        resume_dataset_id=resume_dataset_id,
        compute_environment_id=compute_environment_id,
    )

    # Whatever remains in d are unmodeled keys.
    run_analysis_request.additional_properties = d
    return run_analysis_request
10@_attrs_define 11class RunAnalysisRequestParams: 12 """Parameters used in workflow (can be empty)""" 13 14 additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) 15 16 def to_dict(self) -> Dict[str, Any]: 17 field_dict: Dict[str, Any] = {} 18 field_dict.update(self.additional_properties) 19 field_dict.update({}) 20 21 return field_dict 22 23 @classmethod 24 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 25 d = src_dict.copy() 26 run_analysis_request_params = cls() 27 28 run_analysis_request_params.additional_properties = d 29 return run_analysis_request_params 30 31 @property 32 def additional_keys(self) -> List[str]: 33 return list(self.additional_properties.keys())
Parameters used in workflow (can be empty)
10@_attrs_define 11class RunAnalysisRequestSourceSampleFilesMap: 12 """Files containing samples used to define source data input to this workflow. If not specified, all files will be 13 used. Keys are sampleIds, and the lists are file paths to include. 14 15 """ 16 17 additional_properties: Dict[str, List[str]] = _attrs_field(init=False, factory=dict) 18 19 def to_dict(self) -> Dict[str, Any]: 20 field_dict: Dict[str, Any] = {} 21 for prop_name, prop in self.additional_properties.items(): 22 field_dict[prop_name] = prop 23 24 field_dict.update({}) 25 26 return field_dict 27 28 @classmethod 29 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 30 d = src_dict.copy() 31 run_analysis_request_source_sample_files_map = cls() 32 33 additional_properties = {} 34 for prop_name, prop_dict in d.items(): 35 additional_property = cast(List[str], prop_dict) 36 37 additional_properties[prop_name] = additional_property 38 39 run_analysis_request_source_sample_files_map.additional_properties = additional_properties 40 return run_analysis_request_source_sample_files_map 41 42 @property 43 def additional_keys(self) -> List[str]: 44 return list(self.additional_properties.keys())
Files containing samples used to define source data input to this workflow. If not specified, all files will be used. Keys are sampleIds, and the lists are file paths to include.
Method generated by attrs for class RunAnalysisRequestSourceSampleFilesMap.
28 @classmethod 29 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 30 d = src_dict.copy() 31 run_analysis_request_source_sample_files_map = cls() 32 33 additional_properties = {} 34 for prop_name, prop_dict in d.items(): 35 additional_property = cast(List[str], prop_dict) 36 37 additional_properties[prop_name] = additional_property 38 39 run_analysis_request_source_sample_files_map.additional_properties = additional_properties 40 return run_analysis_request_source_sample_files_map
@_attrs_define
class Sample:
    """A sample and its associated files/metadata.

    Attributes:
        id (str):
        name (str):
        metadata (Union['SampleMetadata', None, Unset]):
        files (Union[List['DataFile'], None, Unset]): Files associated with this sample
        dataset_ids (Union[List[str], None, Unset]):
        created_at (Union[None, Unset, datetime.datetime]):
        updated_at (Union[None, Unset, datetime.datetime]):
    """

    id: str
    name: str
    metadata: Union["SampleMetadata", None, Unset] = UNSET
    files: Union[List["DataFile"], None, Unset] = UNSET
    dataset_ids: Union[List[str], None, Unset] = UNSET
    created_at: Union[None, Unset, datetime.datetime] = UNSET
    updated_at: Union[None, Unset, datetime.datetime] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a camelCase dict; UNSET optional fields are omitted."""
        from ..models.sample_metadata import SampleMetadata

        id = self.id

        name = self.name

        metadata: Union[Dict[str, Any], None, Unset]
        if isinstance(self.metadata, Unset):
            metadata = UNSET
        elif isinstance(self.metadata, SampleMetadata):
            metadata = self.metadata.to_dict()
        else:
            metadata = self.metadata

        # Lists of model objects are serialized element-by-element.
        files: Union[List[Dict[str, Any]], None, Unset]
        if isinstance(self.files, Unset):
            files = UNSET
        elif isinstance(self.files, list):
            files = []
            for files_type_0_item_data in self.files:
                files_type_0_item = files_type_0_item_data.to_dict()
                files.append(files_type_0_item)

        else:
            files = self.files

        dataset_ids: Union[List[str], None, Unset]
        if isinstance(self.dataset_ids, Unset):
            dataset_ids = UNSET
        elif isinstance(self.dataset_ids, list):
            dataset_ids = self.dataset_ids

        else:
            dataset_ids = self.dataset_ids

        # Datetimes are serialized to ISO-8601 strings.
        created_at: Union[None, Unset, str]
        if isinstance(self.created_at, Unset):
            created_at = UNSET
        elif isinstance(self.created_at, datetime.datetime):
            created_at = self.created_at.isoformat()
        else:
            created_at = self.created_at

        updated_at: Union[None, Unset, str]
        if isinstance(self.updated_at, Unset):
            updated_at = UNSET
        elif isinstance(self.updated_at, datetime.datetime):
            updated_at = self.updated_at.isoformat()
        else:
            updated_at = self.updated_at

        field_dict: Dict[str, Any] = {}
        # additional_properties first so the modeled keys below win on collision.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
            }
        )
        if metadata is not UNSET:
            field_dict["metadata"] = metadata
        if files is not UNSET:
            field_dict["files"] = files
        if dataset_ids is not UNSET:
            field_dict["datasetIds"] = dataset_ids
        if created_at is not UNSET:
            field_dict["createdAt"] = created_at
        if updated_at is not UNSET:
            field_dict["updatedAt"] = updated_at

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize a Sample from a camelCase dict.

        Each _parse_* helper passes None/UNSET through, attempts a typed parse,
        and falls back to the raw value on failure. Keys not consumed by named
        fields are collected into additional_properties.
        """
        from ..models.data_file import DataFile
        from ..models.sample_metadata import SampleMetadata

        d = src_dict.copy()
        id = d.pop("id")

        name = d.pop("name")

        def _parse_metadata(data: object) -> Union["SampleMetadata", None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                metadata_type_0 = SampleMetadata.from_dict(data)

                return metadata_type_0
            except:  # noqa: E722
                pass
            return cast(Union["SampleMetadata", None, Unset], data)

        metadata = _parse_metadata(d.pop("metadata", UNSET))

        def _parse_files(data: object) -> Union[List["DataFile"], None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                files_type_0 = []
                _files_type_0 = data
                for files_type_0_item_data in _files_type_0:
                    files_type_0_item = DataFile.from_dict(files_type_0_item_data)

                    files_type_0.append(files_type_0_item)

                return files_type_0
            except:  # noqa: E722
                pass
            return cast(Union[List["DataFile"], None, Unset], data)

        files = _parse_files(d.pop("files", UNSET))

        def _parse_dataset_ids(data: object) -> Union[List[str], None, Unset]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                dataset_ids_type_0 = cast(List[str], data)

                return dataset_ids_type_0
            except:  # noqa: E722
                pass
            return cast(Union[List[str], None, Unset], data)

        dataset_ids = _parse_dataset_ids(d.pop("datasetIds", UNSET))

        def _parse_created_at(data: object) -> Union[None, Unset, datetime.datetime]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                created_at_type_0 = isoparse(data)

                return created_at_type_0
            except:  # noqa: E722
                pass
            return cast(Union[None, Unset, datetime.datetime], data)

        created_at = _parse_created_at(d.pop("createdAt", UNSET))

        def _parse_updated_at(data: object) -> Union[None, Unset, datetime.datetime]:
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, str):
                    raise TypeError()
                updated_at_type_0 = isoparse(data)

                return updated_at_type_0
            except:  # noqa: E722
                pass
            return cast(Union[None, Unset, datetime.datetime], data)

        updated_at = _parse_updated_at(d.pop("updatedAt", UNSET))

        sample = cls(
            id=id,
            name=name,
            metadata=metadata,
            files=files,
            dataset_ids=dataset_ids,
            created_at=created_at,
            updated_at=updated_at,
        )

        # Whatever remains in d are unmodeled keys.
        sample.additional_properties = d
        return sample

    @property
    def additional_keys(self) -> List[str]:
        # Keys held in additional_properties (not modeled as attributes).
        return list(self.additional_properties.keys())
Attributes:
- id (str):
- name (str):
- metadata (Union['SampleMetadata', None, Unset]):
- files (Union[List['DataFile'], None, Unset]): Files associated with this sample
- dataset_ids (Union[List[str], None, Unset]):
- created_at (Union[None, Unset, datetime.datetime]):
- updated_at (Union[None, Unset, datetime.datetime]):
30def __init__(self, id, name, metadata=attr_dict['metadata'].default, files=attr_dict['files'].default, dataset_ids=attr_dict['dataset_ids'].default, created_at=attr_dict['created_at'].default, updated_at=attr_dict['updated_at'].default): 31 self.id = id 32 self.name = name 33 self.metadata = metadata 34 self.files = files 35 self.dataset_ids = dataset_ids 36 self.created_at = created_at 37 self.updated_at = updated_at 38 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Sample.
41 def to_dict(self) -> Dict[str, Any]: 42 from ..models.sample_metadata import SampleMetadata 43 44 id = self.id 45 46 name = self.name 47 48 metadata: Union[Dict[str, Any], None, Unset] 49 if isinstance(self.metadata, Unset): 50 metadata = UNSET 51 elif isinstance(self.metadata, SampleMetadata): 52 metadata = self.metadata.to_dict() 53 else: 54 metadata = self.metadata 55 56 files: Union[List[Dict[str, Any]], None, Unset] 57 if isinstance(self.files, Unset): 58 files = UNSET 59 elif isinstance(self.files, list): 60 files = [] 61 for files_type_0_item_data in self.files: 62 files_type_0_item = files_type_0_item_data.to_dict() 63 files.append(files_type_0_item) 64 65 else: 66 files = self.files 67 68 dataset_ids: Union[List[str], None, Unset] 69 if isinstance(self.dataset_ids, Unset): 70 dataset_ids = UNSET 71 elif isinstance(self.dataset_ids, list): 72 dataset_ids = self.dataset_ids 73 74 else: 75 dataset_ids = self.dataset_ids 76 77 created_at: Union[None, Unset, str] 78 if isinstance(self.created_at, Unset): 79 created_at = UNSET 80 elif isinstance(self.created_at, datetime.datetime): 81 created_at = self.created_at.isoformat() 82 else: 83 created_at = self.created_at 84 85 updated_at: Union[None, Unset, str] 86 if isinstance(self.updated_at, Unset): 87 updated_at = UNSET 88 elif isinstance(self.updated_at, datetime.datetime): 89 updated_at = self.updated_at.isoformat() 90 else: 91 updated_at = self.updated_at 92 93 field_dict: Dict[str, Any] = {} 94 field_dict.update(self.additional_properties) 95 field_dict.update( 96 { 97 "id": id, 98 "name": name, 99 } 100 ) 101 if metadata is not UNSET: 102 field_dict["metadata"] = metadata 103 if files is not UNSET: 104 field_dict["files"] = files 105 if dataset_ids is not UNSET: 106 field_dict["datasetIds"] = dataset_ids 107 if created_at is not UNSET: 108 field_dict["createdAt"] = created_at 109 if updated_at is not UNSET: 110 field_dict["updatedAt"] = updated_at 111 112 return field_dict
114 @classmethod 115 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 116 from ..models.data_file import DataFile 117 from ..models.sample_metadata import SampleMetadata 118 119 d = src_dict.copy() 120 id = d.pop("id") 121 122 name = d.pop("name") 123 124 def _parse_metadata(data: object) -> Union["SampleMetadata", None, Unset]: 125 if data is None: 126 return data 127 if isinstance(data, Unset): 128 return data 129 try: 130 if not isinstance(data, dict): 131 raise TypeError() 132 metadata_type_0 = SampleMetadata.from_dict(data) 133 134 return metadata_type_0 135 except: # noqa: E722 136 pass 137 return cast(Union["SampleMetadata", None, Unset], data) 138 139 metadata = _parse_metadata(d.pop("metadata", UNSET)) 140 141 def _parse_files(data: object) -> Union[List["DataFile"], None, Unset]: 142 if data is None: 143 return data 144 if isinstance(data, Unset): 145 return data 146 try: 147 if not isinstance(data, list): 148 raise TypeError() 149 files_type_0 = [] 150 _files_type_0 = data 151 for files_type_0_item_data in _files_type_0: 152 files_type_0_item = DataFile.from_dict(files_type_0_item_data) 153 154 files_type_0.append(files_type_0_item) 155 156 return files_type_0 157 except: # noqa: E722 158 pass 159 return cast(Union[List["DataFile"], None, Unset], data) 160 161 files = _parse_files(d.pop("files", UNSET)) 162 163 def _parse_dataset_ids(data: object) -> Union[List[str], None, Unset]: 164 if data is None: 165 return data 166 if isinstance(data, Unset): 167 return data 168 try: 169 if not isinstance(data, list): 170 raise TypeError() 171 dataset_ids_type_0 = cast(List[str], data) 172 173 return dataset_ids_type_0 174 except: # noqa: E722 175 pass 176 return cast(Union[List[str], None, Unset], data) 177 178 dataset_ids = _parse_dataset_ids(d.pop("datasetIds", UNSET)) 179 180 def _parse_created_at(data: object) -> Union[None, Unset, datetime.datetime]: 181 if data is None: 182 return data 183 if isinstance(data, Unset): 184 return data 185 try: 186 
if not isinstance(data, str): 187 raise TypeError() 188 created_at_type_0 = isoparse(data) 189 190 return created_at_type_0 191 except: # noqa: E722 192 pass 193 return cast(Union[None, Unset, datetime.datetime], data) 194 195 created_at = _parse_created_at(d.pop("createdAt", UNSET)) 196 197 def _parse_updated_at(data: object) -> Union[None, Unset, datetime.datetime]: 198 if data is None: 199 return data 200 if isinstance(data, Unset): 201 return data 202 try: 203 if not isinstance(data, str): 204 raise TypeError() 205 updated_at_type_0 = isoparse(data) 206 207 return updated_at_type_0 208 except: # noqa: E722 209 pass 210 return cast(Union[None, Unset, datetime.datetime], data) 211 212 updated_at = _parse_updated_at(d.pop("updatedAt", UNSET)) 213 214 sample = cls( 215 id=id, 216 name=name, 217 metadata=metadata, 218 files=files, 219 dataset_ids=dataset_ids, 220 created_at=created_at, 221 updated_at=updated_at, 222 ) 223 224 sample.additional_properties = d 225 return sample
@_attrs_define
class SampleMetadata:
    """Free-form key/value metadata attached to a sample.

    The model declares no fixed fields; every key from the source payload is
    kept verbatim in ``additional_properties``.
    """

    # Holds every key/value pair from the deserialized payload.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict (a shallow copy of the stored properties)."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance, storing every key of ``src_dict`` as an additional property."""
        instance = cls()
        instance.additional_properties = src_dict.copy()
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of all stored properties."""
        return list(self.additional_properties.keys())
@_attrs_define
class SampleRequest:
    """
    Attributes:
        name (str):
        metadata (SampleRequestMetadata):
    """

    name: str
    metadata: "SampleRequestMetadata"
    # Unknown payload keys are preserved here rather than dropped.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; known fields overwrite colliding extra keys."""
        result: Dict[str, Any] = dict(self.additional_properties)
        result["name"] = self.name
        result["metadata"] = self.metadata.to_dict()
        return result

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a dict; leftover keys become additional properties."""
        from ..models.sample_request_metadata import SampleRequestMetadata

        data = src_dict.copy()
        instance = cls(
            name=data.pop("name"),
            metadata=SampleRequestMetadata.from_dict(data.pop("metadata")),
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of all extra (non-declared) properties."""
        return list(self.additional_properties.keys())
Attributes:
- name (str):
- metadata (SampleRequestMetadata):
25def __init__(self, name, metadata): 26 self.name = name 27 self.metadata = metadata 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class SampleRequest.
42 @classmethod 43 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 44 from ..models.sample_request_metadata import SampleRequestMetadata 45 46 d = src_dict.copy() 47 name = d.pop("name") 48 49 metadata = SampleRequestMetadata.from_dict(d.pop("metadata")) 50 51 sample_request = cls( 52 name=name, 53 metadata=metadata, 54 ) 55 56 sample_request.additional_properties = d 57 return sample_request
@_attrs_define
class SampleRequestMetadata:
    """Free-form key/value metadata supplied with a sample request.

    No fixed fields are declared; all payload keys are kept verbatim in
    ``additional_properties``.
    """

    # Holds every key/value pair from the deserialized payload.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict (a shallow copy of the stored properties)."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance, storing every key of ``src_dict`` as an additional property."""
        instance = cls()
        instance.additional_properties = src_dict.copy()
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of all stored properties."""
        return list(self.additional_properties.keys())
@_attrs_define
class SampleSheets:
    """
    Attributes:
        samples (Union[Unset, str]): Written to samplesheet.csv, available as ds.samplesheet in preprocess
        files (Union[Unset, str]): Written to files.csv, available as ds.files in preprocess
    """

    samples: Union[Unset, str] = UNSET
    files: Union[Unset, str] = UNSET
    # Unknown payload keys are preserved here rather than dropped.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, omitting fields that were never set."""
        result: Dict[str, Any] = dict(self.additional_properties)
        if self.samples is not UNSET:
            result["samples"] = self.samples
        if self.files is not UNSET:
            result["files"] = self.files
        return result

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a dict; missing keys stay UNSET, leftovers become extras."""
        data = src_dict.copy()
        instance = cls(
            samples=data.pop("samples", UNSET),
            files=data.pop("files", UNSET),
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of all extra (non-declared) properties."""
        return list(self.additional_properties.keys())
Attributes:
- samples (Union[Unset, str]): Written to samplesheet.csv, available as ds.samplesheet in preprocess
- files (Union[Unset, str]): Written to files.csv, available as ds.files in preprocess
25def __init__(self, samples=attr_dict['samples'].default, files=attr_dict['files'].default): 26 self.samples = samples 27 self.files = files 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class SampleSheets.
24 def to_dict(self) -> Dict[str, Any]: 25 samples = self.samples 26 27 files = self.files 28 29 field_dict: Dict[str, Any] = {} 30 field_dict.update(self.additional_properties) 31 field_dict.update({}) 32 if samples is not UNSET: 33 field_dict["samples"] = samples 34 if files is not UNSET: 35 field_dict["files"] = files 36 37 return field_dict
39 @classmethod 40 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 41 d = src_dict.copy() 42 samples = d.pop("samples", UNSET) 43 44 files = d.pop("files", UNSET) 45 46 sample_sheets = cls( 47 samples=samples, 48 files=files, 49 ) 50 51 sample_sheets.additional_properties = d 52 return sample_sheets
@_attrs_define
class ServiceConnection:
    """
    Attributes:
        name (str):
        description (str):
    """

    name: str
    description: str
    # Unknown payload keys are preserved here rather than dropped.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; known fields overwrite colliding extra keys."""
        result: Dict[str, Any] = dict(self.additional_properties)
        result["name"] = self.name
        result["description"] = self.description
        return result

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a dict; leftover keys become additional properties."""
        data = src_dict.copy()
        connection = cls(
            name=data.pop("name"),
            description=data.pop("description"),
        )
        connection.additional_properties = data
        return connection

    @property
    def additional_keys(self) -> List[str]:
        """Names of all extra (non-declared) properties."""
        return list(self.additional_properties.keys())
Attributes:
- name (str):
- description (str):
25def __init__(self, name, description): 26 self.name = name 27 self.description = description 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ServiceConnection.
38 @classmethod 39 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 40 d = src_dict.copy() 41 name = d.pop("name") 42 43 description = d.pop("description") 44 45 service_connection = cls( 46 name=name, 47 description=description, 48 ) 49 50 service_connection.additional_properties = d 51 return service_connection
@_attrs_define
class SetUserProjectRoleRequest:
    """
    Attributes:
        username (str):
        role (ProjectRole):
        suppress_notification (Union[Unset, bool]):  Default: False.
    """

    username: str
    role: ProjectRole
    suppress_notification: Union[Unset, bool] = False
    # Unknown payload keys are preserved here rather than dropped.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, omitting suppress_notification when unset."""
        result: Dict[str, Any] = dict(self.additional_properties)
        result["username"] = self.username
        # The enum is serialized by its underlying value.
        result["role"] = self.role.value
        if self.suppress_notification is not UNSET:
            result["suppressNotification"] = self.suppress_notification
        return result

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a dict; leftover keys become additional properties."""
        data = src_dict.copy()
        request = cls(
            username=data.pop("username"),
            role=ProjectRole(data.pop("role")),
            suppress_notification=data.pop("suppressNotification", UNSET),
        )
        request.additional_properties = data
        return request

    @property
    def additional_keys(self) -> List[str]:
        """Names of all extra (non-declared) properties."""
        return list(self.additional_properties.keys())
Attributes:
- username (str):
- role (ProjectRole):
- suppress_notification (Union[Unset, bool]): Default: False.
26def __init__(self, username, role, suppress_notification=attr_dict['suppress_notification'].default): 27 self.username = username 28 self.role = role 29 self.suppress_notification = suppress_notification 30 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class SetUserProjectRoleRequest.
27 def to_dict(self) -> Dict[str, Any]: 28 username = self.username 29 30 role = self.role.value 31 32 suppress_notification = self.suppress_notification 33 34 field_dict: Dict[str, Any] = {} 35 field_dict.update(self.additional_properties) 36 field_dict.update( 37 { 38 "username": username, 39 "role": role, 40 } 41 ) 42 if suppress_notification is not UNSET: 43 field_dict["suppressNotification"] = suppress_notification 44 45 return field_dict
47 @classmethod 48 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 49 d = src_dict.copy() 50 username = d.pop("username") 51 52 role = ProjectRole(d.pop("role")) 53 54 suppress_notification = d.pop("suppressNotification", UNSET) 55 56 set_user_project_role_request = cls( 57 username=username, 58 role=role, 59 suppress_notification=suppress_notification, 60 ) 61 62 set_user_project_role_request.additional_properties = d 63 return set_user_project_role_request
@_attrs_define
class SftpCredentials:
    """
    Attributes:
        username (str):
        password (str):
        project_id (str):
        expires_at (datetime.datetime):
    """

    username: str
    password: str
    project_id: str
    expires_at: datetime.datetime
    # Unknown payload keys are preserved here rather than dropped.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; expiry is emitted as an ISO-8601 string."""
        result: Dict[str, Any] = dict(self.additional_properties)
        result.update(
            {
                "username": self.username,
                "password": self.password,
                "projectId": self.project_id,
                "expiresAt": self.expires_at.isoformat(),
            }
        )
        return result

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a dict; expiry is parsed from ISO-8601, leftovers become extras."""
        data = src_dict.copy()
        credentials = cls(
            username=data.pop("username"),
            password=data.pop("password"),
            project_id=data.pop("projectId"),
            expires_at=isoparse(data.pop("expiresAt")),
        )
        credentials.additional_properties = data
        return credentials

    @property
    def additional_keys(self) -> List[str]:
        """Names of all extra (non-declared) properties."""
        return list(self.additional_properties.keys())
Attributes:
- username (str):
- password (str):
- project_id (str):
- expires_at (datetime.datetime):
27def __init__(self, username, password, project_id, expires_at): 28 self.username = username 29 self.password = password 30 self.project_id = project_id 31 self.expires_at = expires_at 32 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class SftpCredentials.
28 def to_dict(self) -> Dict[str, Any]: 29 username = self.username 30 31 password = self.password 32 33 project_id = self.project_id 34 35 expires_at = self.expires_at.isoformat() 36 37 field_dict: Dict[str, Any] = {} 38 field_dict.update(self.additional_properties) 39 field_dict.update( 40 { 41 "username": username, 42 "password": password, 43 "projectId": project_id, 44 "expiresAt": expires_at, 45 } 46 ) 47 48 return field_dict
50 @classmethod 51 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 52 d = src_dict.copy() 53 username = d.pop("username") 54 55 password = d.pop("password") 56 57 project_id = d.pop("projectId") 58 59 expires_at = isoparse(d.pop("expiresAt")) 60 61 sftp_credentials = cls( 62 username=username, 63 password=password, 64 project_id=project_id, 65 expires_at=expires_at, 66 ) 67 68 sftp_credentials.additional_properties = d 69 return sftp_credentials
class SortOrder(str, Enum):
    """Sort direction for query results; unrecognized values map to UNKNOWN."""

    ASCENDING = "ASCENDING"
    DESCENDING = "DESCENDING"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Called by Enum for any lookup that matches no member: fall back to
        # UNKNOWN instead of raising ValueError. Return the member directly
        # rather than cls(cls.UNKNOWN), which needlessly re-enters the lookup.
        return cls.UNKNOWN
Docstring inherited from the built-in `str` base class (this enum subclasses `str`): str(object='') -> str; str(bytes_or_buffer[, encoding[, errors]]) -> str.
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(); errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
class Status(str, Enum):
    """Status values reported by the API; unrecognized values map to UNKNOWN."""

    ARCHIVED = "ARCHIVED"
    COMPLETED = "COMPLETED"
    DELETE = "DELETE"
    DELETED = "DELETED"
    DELETING = "DELETING"
    FAILED = "FAILED"
    PENDING = "PENDING"
    RUNNING = "RUNNING"
    SUSPENDED = "SUSPENDED"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Called by Enum for any lookup that matches no member: fall back to
        # UNKNOWN instead of raising ValueError. Return the member directly
        # rather than cls(cls.UNKNOWN), which needlessly re-enters the lookup.
        return cls.UNKNOWN
Docstring inherited from the built-in `str` base class (this enum subclasses `str`): str(object='') -> str; str(bytes_or_buffer[, encoding[, errors]]) -> str.
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(); errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class StopExecutionResponse:
    """
    Attributes:
        success (Union[Unset, List[str]]): List of job IDs that were successful in termination
        failed (Union[Unset, List[str]]): List of job IDs that were not successful in termination
    """

    success: Union[Unset, List[str]] = UNSET
    failed: Union[Unset, List[str]] = UNSET
    # Unknown payload keys are preserved here rather than dropped.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, omitting fields that were never set."""
        result: Dict[str, Any] = dict(self.additional_properties)
        if not isinstance(self.success, Unset):
            result["success"] = self.success
        if not isinstance(self.failed, Unset):
            result["failed"] = self.failed
        return result

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a dict; missing keys stay UNSET, leftovers become extras."""
        data = src_dict.copy()
        response = cls(
            success=cast(List[str], data.pop("success", UNSET)),
            failed=cast(List[str], data.pop("failed", UNSET)),
        )
        response.additional_properties = data
        return response

    @property
    def additional_keys(self) -> List[str]:
        """Names of all extra (non-declared) properties."""
        return list(self.additional_properties.keys())
Attributes:
- success (Union[Unset, List[str]]): List of job IDs that were successful in termination
- failed (Union[Unset, List[str]]): List of job IDs that were not successful in termination
25def __init__(self, success=attr_dict['success'].default, failed=attr_dict['failed'].default): 26 self.success = success 27 self.failed = failed 28 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class StopExecutionResponse.
24 def to_dict(self) -> Dict[str, Any]: 25 success: Union[Unset, List[str]] = UNSET 26 if not isinstance(self.success, Unset): 27 success = self.success 28 29 failed: Union[Unset, List[str]] = UNSET 30 if not isinstance(self.failed, Unset): 31 failed = self.failed 32 33 field_dict: Dict[str, Any] = {} 34 field_dict.update(self.additional_properties) 35 field_dict.update({}) 36 if success is not UNSET: 37 field_dict["success"] = success 38 if failed is not UNSET: 39 field_dict["failed"] = failed 40 41 return field_dict
43 @classmethod 44 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 45 d = src_dict.copy() 46 success = cast(List[str], d.pop("success", UNSET)) 47 48 failed = cast(List[str], d.pop("failed", UNSET)) 49 50 stop_execution_response = cls( 51 success=success, 52 failed=failed, 53 ) 54 55 stop_execution_response.additional_properties = d 56 return stop_execution_response
class SyncStatus(str, Enum):
    """Result of a sync operation; unrecognized values map to UNKNOWN."""

    FAILED = "FAILED"
    SUCCESSFUL = "SUCCESSFUL"
    UNKNOWN = "UNKNOWN"
    """ This is a fallback value for when the value is not known, do not use this value when making requests """

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        # Called by Enum for any lookup that matches no member: fall back to
        # UNKNOWN instead of raising ValueError. Return the member directly
        # rather than cls(cls.UNKNOWN), which needlessly re-enters the lookup.
        return cls.UNKNOWN
Docstring inherited from the built-in `str` base class (this enum subclasses `str`): str(object='') -> str; str(bytes_or_buffer[, encoding[, errors]]) -> str.
Create a new string object from the given object. If encoding or errors is specified, then the object must expose a data buffer that will be decoded using the given encoding and error handler. Otherwise, returns the result of object.__str__() (if defined) or repr(object). encoding defaults to sys.getdefaultencoding(); errors defaults to 'strict'.
This is a fallback value for when the value is not known, do not use this value when making requests
@_attrs_define
class SystemInfoResponse:
    """
    Attributes:
        resources_bucket (str):
        references_bucket (str):
        live_endpoint (str):
        agent_endpoint (str):
        region (str):
        system_message (str):
        maintenance_mode_enabled (bool):
        commit_hash (str):
        version (str):
        resources_info (ResourcesInfo):
        tenant_info (TenantInfo):
        auth (AuthInfo):
    """

    resources_bucket: str
    references_bucket: str
    live_endpoint: str
    agent_endpoint: str
    region: str
    system_message: str
    maintenance_mode_enabled: bool
    commit_hash: str
    version: str
    resources_info: "ResourcesInfo"
    tenant_info: "TenantInfo"
    auth: "AuthInfo"
    # Unknown payload keys are preserved here rather than dropped.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; nested models are serialized recursively."""
        result: Dict[str, Any] = dict(self.additional_properties)
        result.update(
            {
                "resourcesBucket": self.resources_bucket,
                "referencesBucket": self.references_bucket,
                "liveEndpoint": self.live_endpoint,
                "agentEndpoint": self.agent_endpoint,
                "region": self.region,
                "systemMessage": self.system_message,
                "maintenanceModeEnabled": self.maintenance_mode_enabled,
                "commitHash": self.commit_hash,
                "version": self.version,
                "resourcesInfo": self.resources_info.to_dict(),
                "tenantInfo": self.tenant_info.to_dict(),
                "auth": self.auth.to_dict(),
            }
        )
        return result

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a dict; leftover keys become additional properties."""
        from ..models.auth_info import AuthInfo
        from ..models.resources_info import ResourcesInfo
        from ..models.tenant_info import TenantInfo

        data = src_dict.copy()
        response = cls(
            resources_bucket=data.pop("resourcesBucket"),
            references_bucket=data.pop("referencesBucket"),
            live_endpoint=data.pop("liveEndpoint"),
            agent_endpoint=data.pop("agentEndpoint"),
            region=data.pop("region"),
            system_message=data.pop("systemMessage"),
            maintenance_mode_enabled=data.pop("maintenanceModeEnabled"),
            commit_hash=data.pop("commitHash"),
            version=data.pop("version"),
            resources_info=ResourcesInfo.from_dict(data.pop("resourcesInfo")),
            tenant_info=TenantInfo.from_dict(data.pop("tenantInfo")),
            auth=AuthInfo.from_dict(data.pop("auth")),
        )
        response.additional_properties = data
        return response

    @property
    def additional_keys(self) -> List[str]:
        """Names of all extra (non-declared) properties."""
        return list(self.additional_properties.keys())
Attributes:
- resources_bucket (str):
- references_bucket (str):
- live_endpoint (str):
- agent_endpoint (str):
- region (str):
- system_message (str):
- maintenance_mode_enabled (bool):
- commit_hash (str):
- version (str):
- resources_info (ResourcesInfo):
- tenant_info (TenantInfo):
- auth (AuthInfo):
35def __init__(self, resources_bucket, references_bucket, live_endpoint, agent_endpoint, region, system_message, maintenance_mode_enabled, commit_hash, version, resources_info, tenant_info, auth): 36 self.resources_bucket = resources_bucket 37 self.references_bucket = references_bucket 38 self.live_endpoint = live_endpoint 39 self.agent_endpoint = agent_endpoint 40 self.region = region 41 self.system_message = system_message 42 self.maintenance_mode_enabled = maintenance_mode_enabled 43 self.commit_hash = commit_hash 44 self.version = version 45 self.resources_info = resources_info 46 self.tenant_info = tenant_info 47 self.auth = auth 48 self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class SystemInfoResponse.
48 def to_dict(self) -> Dict[str, Any]: 49 resources_bucket = self.resources_bucket 50 51 references_bucket = self.references_bucket 52 53 live_endpoint = self.live_endpoint 54 55 agent_endpoint = self.agent_endpoint 56 57 region = self.region 58 59 system_message = self.system_message 60 61 maintenance_mode_enabled = self.maintenance_mode_enabled 62 63 commit_hash = self.commit_hash 64 65 version = self.version 66 67 resources_info = self.resources_info.to_dict() 68 69 tenant_info = self.tenant_info.to_dict() 70 71 auth = self.auth.to_dict() 72 73 field_dict: Dict[str, Any] = {} 74 field_dict.update(self.additional_properties) 75 field_dict.update( 76 { 77 "resourcesBucket": resources_bucket, 78 "referencesBucket": references_bucket, 79 "liveEndpoint": live_endpoint, 80 "agentEndpoint": agent_endpoint, 81 "region": region, 82 "systemMessage": system_message, 83 "maintenanceModeEnabled": maintenance_mode_enabled, 84 "commitHash": commit_hash, 85 "version": version, 86 "resourcesInfo": resources_info, 87 "tenantInfo": tenant_info, 88 "auth": auth, 89 } 90 ) 91 92 return field_dict
94 @classmethod 95 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 96 from ..models.auth_info import AuthInfo 97 from ..models.resources_info import ResourcesInfo 98 from ..models.tenant_info import TenantInfo 99 100 d = src_dict.copy() 101 resources_bucket = d.pop("resourcesBucket") 102 103 references_bucket = d.pop("referencesBucket") 104 105 live_endpoint = d.pop("liveEndpoint") 106 107 agent_endpoint = d.pop("agentEndpoint") 108 109 region = d.pop("region") 110 111 system_message = d.pop("systemMessage") 112 113 maintenance_mode_enabled = d.pop("maintenanceModeEnabled") 114 115 commit_hash = d.pop("commitHash") 116 117 version = d.pop("version") 118 119 resources_info = ResourcesInfo.from_dict(d.pop("resourcesInfo")) 120 121 tenant_info = TenantInfo.from_dict(d.pop("tenantInfo")) 122 123 auth = AuthInfo.from_dict(d.pop("auth")) 124 125 system_info_response = cls( 126 resources_bucket=resources_bucket, 127 references_bucket=references_bucket, 128 live_endpoint=live_endpoint, 129 agent_endpoint=agent_endpoint, 130 region=region, 131 system_message=system_message, 132 maintenance_mode_enabled=maintenance_mode_enabled, 133 commit_hash=commit_hash, 134 version=version, 135 resources_info=resources_info, 136 tenant_info=tenant_info, 137 auth=auth, 138 ) 139 140 system_info_response.additional_properties = d 141 return system_info_response
@_attrs_define
class Table:
    """
    Attributes:
        desc (str):
        name (Union[Unset, str]): User-friendly name of asset
        type (Union[Unset, str]): Type of file Example: parquet.
        rows (Union[Unset, int]): Number of rows in table
        path (Union[Unset, str]): Relative path to asset
        cols (Union[List['ColumnDefinition'], None, Unset]):
    """

    desc: str
    name: Union[Unset, str] = UNSET
    type: Union[Unset, str] = UNSET
    rows: Union[Unset, int] = UNSET
    path: Union[Unset, str] = UNSET
    cols: Union[List["ColumnDefinition"], None, Unset] = UNSET
    # Unknown payload keys are preserved here rather than dropped.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, omitting optional fields that were never set."""
        cols: Union[List[Dict[str, Any]], None, Unset]
        if isinstance(self.cols, Unset):
            cols = UNSET
        elif isinstance(self.cols, list):
            cols = [col.to_dict() for col in self.cols]
        else:
            # Explicit None is serialized as-is (nullable field).
            cols = self.cols

        result: Dict[str, Any] = dict(self.additional_properties)
        result["desc"] = self.desc
        for key, value in (
            ("name", self.name),
            ("type", self.type),
            ("rows", self.rows),
            ("path", self.path),
            ("cols", cols),
        ):
            if value is not UNSET:
                result[key] = value
        return result

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from a dict; leftover keys become additional properties."""
        from ..models.column_definition import ColumnDefinition

        data = src_dict.copy()

        def _parse_cols(value: object) -> Union[List["ColumnDefinition"], None, Unset]:
            # None and UNSET pass through untouched (nullable / absent field).
            if value is None:
                return value
            if isinstance(value, Unset):
                return value
            try:
                if not isinstance(value, list):
                    raise TypeError()
                return [ColumnDefinition.from_dict(item) for item in value]
            except:  # noqa: E722
                pass
            # Fall back to returning the raw value when parsing fails.
            return cast(Union[List["ColumnDefinition"], None, Unset], value)

        table = cls(
            desc=data.pop("desc"),
            name=data.pop("name", UNSET),
            type=data.pop("type", UNSET),
            rows=data.pop("rows", UNSET),
            path=data.pop("path", UNSET),
            cols=_parse_cols(data.pop("cols", UNSET)),
        )
        table.additional_properties = data
        return table

    @property
    def additional_keys(self) -> List[str]:
        """Names of all extra (non-declared) properties."""
        return list(self.additional_properties.keys())
Attributes:
- desc (str):
- name (Union[Unset, str]): User-friendly name of asset
- type (Union[Unset, str]): Type of file Example: parquet.
- rows (Union[Unset, int]): Number of rows in table
- path (Union[Unset, str]): Relative path to asset
- cols (Union[List['ColumnDefinition'], None, Unset]):
# attrs-generated initializer for Table (rendered documentation listing).
# ``attr_dict`` maps field names to their attrs definitions; each optional
# parameter defaults to its declared field default (UNSET).
def __init__(self, desc, name=attr_dict['name'].default, type=attr_dict['type'].default, rows=attr_dict['rows'].default, path=attr_dict['path'].default, cols=attr_dict['cols'].default):
    self.desc = desc
    self.name = name
    self.type = type
    self.rows = rows
    self.path = path
    self.cols = cols
    # Fresh dict per instance, from the field's ``factory=dict``.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Table.
36 def to_dict(self) -> Dict[str, Any]: 37 desc = self.desc 38 39 name = self.name 40 41 type = self.type 42 43 rows = self.rows 44 45 path = self.path 46 47 cols: Union[List[Dict[str, Any]], None, Unset] 48 if isinstance(self.cols, Unset): 49 cols = UNSET 50 elif isinstance(self.cols, list): 51 cols = [] 52 for cols_type_0_item_data in self.cols: 53 cols_type_0_item = cols_type_0_item_data.to_dict() 54 cols.append(cols_type_0_item) 55 56 else: 57 cols = self.cols 58 59 field_dict: Dict[str, Any] = {} 60 field_dict.update(self.additional_properties) 61 field_dict.update( 62 { 63 "desc": desc, 64 } 65 ) 66 if name is not UNSET: 67 field_dict["name"] = name 68 if type is not UNSET: 69 field_dict["type"] = type 70 if rows is not UNSET: 71 field_dict["rows"] = rows 72 if path is not UNSET: 73 field_dict["path"] = path 74 if cols is not UNSET: 75 field_dict["cols"] = cols 76 77 return field_dict
79 @classmethod 80 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 81 from ..models.column_definition import ColumnDefinition 82 83 d = src_dict.copy() 84 desc = d.pop("desc") 85 86 name = d.pop("name", UNSET) 87 88 type = d.pop("type", UNSET) 89 90 rows = d.pop("rows", UNSET) 91 92 path = d.pop("path", UNSET) 93 94 def _parse_cols(data: object) -> Union[List["ColumnDefinition"], None, Unset]: 95 if data is None: 96 return data 97 if isinstance(data, Unset): 98 return data 99 try: 100 if not isinstance(data, list): 101 raise TypeError() 102 cols_type_0 = [] 103 _cols_type_0 = data 104 for cols_type_0_item_data in _cols_type_0: 105 cols_type_0_item = ColumnDefinition.from_dict(cols_type_0_item_data) 106 107 cols_type_0.append(cols_type_0_item) 108 109 return cols_type_0 110 except: # noqa: E722 111 pass 112 return cast(Union[List["ColumnDefinition"], None, Unset], data) 113 114 cols = _parse_cols(d.pop("cols", UNSET)) 115 116 table = cls( 117 desc=desc, 118 name=name, 119 type=type, 120 rows=rows, 121 path=path, 122 cols=cols, 123 ) 124 125 table.additional_properties = d 126 return table
@_attrs_define
class Tag:
    """A key/value tag attached to a resource.

    Attributes:
        value (str): The value of the tag
        editable (Union[Unset, bool]): Whether the tag value is editable Default: True.
        key (Union[None, Unset, str]):
    """

    value: str
    editable: Union[Unset, bool] = True
    key: Union[None, Unset, str] = UNSET
    # Payload keys not covered by the declared fields above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; unset optional fields are omitted."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized["value"] = self.value
        if self.editable is not UNSET:
            serialized["editable"] = self.editable
        # ``key`` is emitted even when it is an explicit None, but not when absent.
        if not isinstance(self.key, Unset):
            serialized["key"] = self.key
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build a Tag from its dict form; leftover keys become extra properties."""
        data = src_dict.copy()

        # Tri-state field: absent (UNSET), explicit null (None), or a string;
        # the cast is a no-op at runtime and only narrows the static type.
        key = cast(Union[None, Unset, str], data.pop("key", UNSET))

        tag = cls(
            value=data.pop("value"),
            editable=data.pop("editable", UNSET),
            key=key,
        )
        tag.additional_properties = data
        return tag

    @property
    def additional_keys(self) -> List[str]:
        """Names of payload keys beyond the declared attributes."""
        return [*self.additional_properties]
Attributes:
- value (str): The value of the tag
- editable (Union[Unset, bool]): Whether the tag value is editable Default: True.
- key (Union[None, Unset, str]):
# attrs-generated initializer for Tag (rendered documentation listing).
# ``attr_dict`` maps field names to their attrs definitions; optional
# parameters default to the declared field defaults.
def __init__(self, value, editable=attr_dict['editable'].default, key=attr_dict['key'].default):
    self.value = value
    self.editable = editable
    self.key = key
    # Fresh dict per instance, from the field's ``factory=dict``.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Tag.
26 def to_dict(self) -> Dict[str, Any]: 27 value = self.value 28 29 editable = self.editable 30 31 key: Union[None, Unset, str] 32 if isinstance(self.key, Unset): 33 key = UNSET 34 else: 35 key = self.key 36 37 field_dict: Dict[str, Any] = {} 38 field_dict.update(self.additional_properties) 39 field_dict.update( 40 { 41 "value": value, 42 } 43 ) 44 if editable is not UNSET: 45 field_dict["editable"] = editable 46 if key is not UNSET: 47 field_dict["key"] = key 48 49 return field_dict
51 @classmethod 52 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 53 d = src_dict.copy() 54 value = d.pop("value") 55 56 editable = d.pop("editable", UNSET) 57 58 def _parse_key(data: object) -> Union[None, Unset, str]: 59 if data is None: 60 return data 61 if isinstance(data, Unset): 62 return data 63 return cast(Union[None, Unset, str], data) 64 65 key = _parse_key(d.pop("key", UNSET)) 66 67 tag = cls( 68 value=value, 69 editable=editable, 70 key=key, 71 ) 72 73 tag.additional_properties = d 74 return tag
@_attrs_define
class Task:
    """A single execution task within a pipeline job.

    Attributes:
        name (str):
        native_job_id (str):
        status (str):
        requested_at (datetime.datetime):
        started_at (Union[Unset, datetime.datetime]):
        stopped_at (Union[Unset, datetime.datetime]):
        container_image (Union[Unset, str]):
        command_line (Union[Unset, str]):
        log_location (Union[Unset, str]):
    """

    name: str
    native_job_id: str
    status: str
    requested_at: datetime.datetime
    started_at: Union[Unset, datetime.datetime] = UNSET
    stopped_at: Union[Unset, datetime.datetime] = UNSET
    container_image: Union[Unset, str] = UNSET
    command_line: Union[Unset, str] = UNSET
    log_location: Union[Unset, str] = UNSET
    # Payload keys not covered by the declared fields above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict; unset optional fields are omitted."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "name": self.name,
                "nativeJobId": self.native_job_id,
                "status": self.status,
                "requestedAt": self.requested_at.isoformat(),
            }
        )

        # Optional timestamps are rendered as ISO-8601 strings when present.
        for label, stamp in (("startedAt", self.started_at), ("stoppedAt", self.stopped_at)):
            if not isinstance(stamp, Unset):
                serialized[label] = stamp.isoformat()

        # Plain optional strings pass through unchanged when present.
        for label, text in (
            ("containerImage", self.container_image),
            ("commandLine", self.command_line),
            ("logLocation", self.log_location),
        ):
            if not isinstance(text, Unset):
                serialized[label] = text

        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build a Task from its dict form; leftover keys become extra properties."""
        data = src_dict.copy()

        def _optional_timestamp(raw: Union[Unset, str]) -> Union[Unset, datetime.datetime]:
            # Absent timestamps stay UNSET; present ones are ISO-8601 strings.
            return raw if isinstance(raw, Unset) else isoparse(raw)

        task = cls(
            name=data.pop("name"),
            native_job_id=data.pop("nativeJobId"),
            status=data.pop("status"),
            requested_at=isoparse(data.pop("requestedAt")),
            started_at=_optional_timestamp(data.pop("startedAt", UNSET)),
            stopped_at=_optional_timestamp(data.pop("stoppedAt", UNSET)),
            container_image=data.pop("containerImage", UNSET),
            command_line=data.pop("commandLine", UNSET),
            log_location=data.pop("logLocation", UNSET),
        )

        task.additional_properties = data
        return task

    @property
    def additional_keys(self) -> List[str]:
        """Names of payload keys beyond the declared attributes."""
        return [*self.additional_properties]
Attributes:
- name (str):
- native_job_id (str):
- status (str):
- requested_at (datetime.datetime):
- started_at (Union[Unset, datetime.datetime]):
- stopped_at (Union[Unset, datetime.datetime]):
- container_image (Union[Unset, str]):
- command_line (Union[Unset, str]):
- log_location (Union[Unset, str]):
# attrs-generated initializer for Task (rendered documentation listing).
# ``attr_dict`` maps field names to their attrs definitions; optional
# parameters default to the declared field defaults (UNSET).
def __init__(self, name, native_job_id, status, requested_at, started_at=attr_dict['started_at'].default, stopped_at=attr_dict['stopped_at'].default, container_image=attr_dict['container_image'].default, command_line=attr_dict['command_line'].default, log_location=attr_dict['log_location'].default):
    self.name = name
    self.native_job_id = native_job_id
    self.status = status
    self.requested_at = requested_at
    self.started_at = started_at
    self.stopped_at = stopped_at
    self.container_image = container_image
    self.command_line = command_line
    self.log_location = log_location
    # Fresh dict per instance, from the field's ``factory=dict``.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class Task.
40 def to_dict(self) -> Dict[str, Any]: 41 name = self.name 42 43 native_job_id = self.native_job_id 44 45 status = self.status 46 47 requested_at = self.requested_at.isoformat() 48 49 started_at: Union[Unset, str] = UNSET 50 if not isinstance(self.started_at, Unset): 51 started_at = self.started_at.isoformat() 52 53 stopped_at: Union[Unset, str] = UNSET 54 if not isinstance(self.stopped_at, Unset): 55 stopped_at = self.stopped_at.isoformat() 56 57 container_image = self.container_image 58 59 command_line = self.command_line 60 61 log_location = self.log_location 62 63 field_dict: Dict[str, Any] = {} 64 field_dict.update(self.additional_properties) 65 field_dict.update( 66 { 67 "name": name, 68 "nativeJobId": native_job_id, 69 "status": status, 70 "requestedAt": requested_at, 71 } 72 ) 73 if started_at is not UNSET: 74 field_dict["startedAt"] = started_at 75 if stopped_at is not UNSET: 76 field_dict["stoppedAt"] = stopped_at 77 if container_image is not UNSET: 78 field_dict["containerImage"] = container_image 79 if command_line is not UNSET: 80 field_dict["commandLine"] = command_line 81 if log_location is not UNSET: 82 field_dict["logLocation"] = log_location 83 84 return field_dict
86 @classmethod 87 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 88 d = src_dict.copy() 89 name = d.pop("name") 90 91 native_job_id = d.pop("nativeJobId") 92 93 status = d.pop("status") 94 95 requested_at = isoparse(d.pop("requestedAt")) 96 97 _started_at = d.pop("startedAt", UNSET) 98 started_at: Union[Unset, datetime.datetime] 99 if isinstance(_started_at, Unset): 100 started_at = UNSET 101 else: 102 started_at = isoparse(_started_at) 103 104 _stopped_at = d.pop("stoppedAt", UNSET) 105 stopped_at: Union[Unset, datetime.datetime] 106 if isinstance(_stopped_at, Unset): 107 stopped_at = UNSET 108 else: 109 stopped_at = isoparse(_stopped_at) 110 111 container_image = d.pop("containerImage", UNSET) 112 113 command_line = d.pop("commandLine", UNSET) 114 115 log_location = d.pop("logLocation", UNSET) 116 117 task = cls( 118 name=name, 119 native_job_id=native_job_id, 120 status=status, 121 requested_at=requested_at, 122 started_at=started_at, 123 stopped_at=stopped_at, 124 container_image=container_image, 125 command_line=command_line, 126 log_location=log_location, 127 ) 128 129 task.additional_properties = d 130 return task
@_attrs_define
class TenantInfo:
    """Tenant-level branding, contact, and feature information.

    Attributes:
        id (str):
        name (str):
        description (str):
        location (str):
        contact_email (str):
        tenant_logo_url (str):
        terms_of_service_url (str):
        privacy_policy_url (str):
        login_providers (List['LoginProvider']):
        features (FeatureFlags):
    """

    id: str
    name: str
    description: str
    location: str
    contact_email: str
    tenant_logo_url: str
    terms_of_service_url: str
    privacy_policy_url: str
    login_providers: List["LoginProvider"]
    features: "FeatureFlags"
    # Payload keys not covered by the declared fields above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict; every field is required, so all are emitted."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "id": self.id,
                "name": self.name,
                "description": self.description,
                "location": self.location,
                "contactEmail": self.contact_email,
                "tenantLogoUrl": self.tenant_logo_url,
                "termsOfServiceUrl": self.terms_of_service_url,
                "privacyPolicyUrl": self.privacy_policy_url,
                "loginProviders": [provider.to_dict() for provider in self.login_providers],
                "features": self.features.to_dict(),
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build a TenantInfo from its dict form; leftover keys become extra properties."""
        from ..models.feature_flags import FeatureFlags
        from ..models.login_provider import LoginProvider

        data = src_dict.copy()
        tenant_info = cls(
            id=data.pop("id"),
            name=data.pop("name"),
            description=data.pop("description"),
            location=data.pop("location"),
            contact_email=data.pop("contactEmail"),
            tenant_logo_url=data.pop("tenantLogoUrl"),
            terms_of_service_url=data.pop("termsOfServiceUrl"),
            privacy_policy_url=data.pop("privacyPolicyUrl"),
            login_providers=[LoginProvider.from_dict(item) for item in data.pop("loginProviders")],
            features=FeatureFlags.from_dict(data.pop("features")),
        )
        tenant_info.additional_properties = data
        return tenant_info

    @property
    def additional_keys(self) -> List[str]:
        """Names of payload keys beyond the declared attributes."""
        return [*self.additional_properties]
Attributes:
- id (str):
- name (str):
- description (str):
- location (str):
- contact_email (str):
- tenant_logo_url (str):
- terms_of_service_url (str):
- privacy_policy_url (str):
- login_providers (List['LoginProvider']):
- features (FeatureFlags):
# attrs-generated initializer for TenantInfo (rendered documentation listing).
# All fields are required, so there are no defaulted parameters.
def __init__(self, id, name, description, location, contact_email, tenant_logo_url, terms_of_service_url, privacy_policy_url, login_providers, features):
    self.id = id
    self.name = name
    self.description = description
    self.location = location
    self.contact_email = contact_email
    self.tenant_logo_url = tenant_logo_url
    self.terms_of_service_url = terms_of_service_url
    self.privacy_policy_url = privacy_policy_url
    self.login_providers = login_providers
    self.features = features
    # Fresh dict per instance, from the field's ``factory=dict``.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class TenantInfo.
43 def to_dict(self) -> Dict[str, Any]: 44 id = self.id 45 46 name = self.name 47 48 description = self.description 49 50 location = self.location 51 52 contact_email = self.contact_email 53 54 tenant_logo_url = self.tenant_logo_url 55 56 terms_of_service_url = self.terms_of_service_url 57 58 privacy_policy_url = self.privacy_policy_url 59 60 login_providers = [] 61 for login_providers_item_data in self.login_providers: 62 login_providers_item = login_providers_item_data.to_dict() 63 login_providers.append(login_providers_item) 64 65 features = self.features.to_dict() 66 67 field_dict: Dict[str, Any] = {} 68 field_dict.update(self.additional_properties) 69 field_dict.update( 70 { 71 "id": id, 72 "name": name, 73 "description": description, 74 "location": location, 75 "contactEmail": contact_email, 76 "tenantLogoUrl": tenant_logo_url, 77 "termsOfServiceUrl": terms_of_service_url, 78 "privacyPolicyUrl": privacy_policy_url, 79 "loginProviders": login_providers, 80 "features": features, 81 } 82 ) 83 84 return field_dict
86 @classmethod 87 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 88 from ..models.feature_flags import FeatureFlags 89 from ..models.login_provider import LoginProvider 90 91 d = src_dict.copy() 92 id = d.pop("id") 93 94 name = d.pop("name") 95 96 description = d.pop("description") 97 98 location = d.pop("location") 99 100 contact_email = d.pop("contactEmail") 101 102 tenant_logo_url = d.pop("tenantLogoUrl") 103 104 terms_of_service_url = d.pop("termsOfServiceUrl") 105 106 privacy_policy_url = d.pop("privacyPolicyUrl") 107 108 login_providers = [] 109 _login_providers = d.pop("loginProviders") 110 for login_providers_item_data in _login_providers: 111 login_providers_item = LoginProvider.from_dict(login_providers_item_data) 112 113 login_providers.append(login_providers_item) 114 115 features = FeatureFlags.from_dict(d.pop("features")) 116 117 tenant_info = cls( 118 id=id, 119 name=name, 120 description=description, 121 location=location, 122 contact_email=contact_email, 123 tenant_logo_url=tenant_logo_url, 124 terms_of_service_url=terms_of_service_url, 125 privacy_policy_url=privacy_policy_url, 126 login_providers=login_providers, 127 features=features, 128 ) 129 130 tenant_info.additional_properties = d 131 return tenant_info
@_attrs_define
class UpdateDatasetRequest:
    """Request payload for updating a dataset's metadata.

    Attributes:
        name (str):
        description (str):
        process_id (str):
        tags (List['Tag']):
    """

    name: str
    description: str
    process_id: str
    tags: List["Tag"]
    # Payload keys not covered by the declared fields above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict; every field is required, so all are emitted."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "name": self.name,
                "description": self.description,
                "processId": self.process_id,
                "tags": [tag.to_dict() for tag in self.tags],
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an UpdateDatasetRequest from its dict form."""
        from ..models.tag import Tag

        data = src_dict.copy()
        request = cls(
            name=data.pop("name"),
            description=data.pop("description"),
            process_id=data.pop("processId"),
            tags=[Tag.from_dict(item) for item in data.pop("tags")],
        )
        request.additional_properties = data
        return request

    @property
    def additional_keys(self) -> List[str]:
        """Names of payload keys beyond the declared attributes."""
        return [*self.additional_properties]
Attributes:
- name (str):
- description (str):
- process_id (str):
- tags (List['Tag']):
# attrs-generated initializer for UpdateDatasetRequest (rendered documentation
# listing). All fields are required, so there are no defaulted parameters.
def __init__(self, name, description, process_id, tags):
    self.name = name
    self.description = description
    self.process_id = process_id
    self.tags = tags
    # Fresh dict per instance, from the field's ``factory=dict``.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class UpdateDatasetRequest.
30 def to_dict(self) -> Dict[str, Any]: 31 name = self.name 32 33 description = self.description 34 35 process_id = self.process_id 36 37 tags = [] 38 for tags_item_data in self.tags: 39 tags_item = tags_item_data.to_dict() 40 tags.append(tags_item) 41 42 field_dict: Dict[str, Any] = {} 43 field_dict.update(self.additional_properties) 44 field_dict.update( 45 { 46 "name": name, 47 "description": description, 48 "processId": process_id, 49 "tags": tags, 50 } 51 ) 52 53 return field_dict
55 @classmethod 56 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 57 from ..models.tag import Tag 58 59 d = src_dict.copy() 60 name = d.pop("name") 61 62 description = d.pop("description") 63 64 process_id = d.pop("processId") 65 66 tags = [] 67 _tags = d.pop("tags") 68 for tags_item_data in _tags: 69 tags_item = Tag.from_dict(tags_item_data) 70 71 tags.append(tags_item) 72 73 update_dataset_request = cls( 74 name=name, 75 description=description, 76 process_id=process_id, 77 tags=tags, 78 ) 79 80 update_dataset_request.additional_properties = d 81 return update_dataset_request
@_attrs_define
class UpdateUserRequest:
    """Request payload for updating a user's profile.

    Attributes:
        name (str): Display name of the user
        email (str): Email address of the user
        phone (Union[Unset, str]): Phone number of the user
        department (Union[Unset, str]): Department or lab the user belongs to
        job_title (Union[Unset, str]): Job title or role of the user
        organization (Union[Unset, str]): The organization the user belongs to, only editable by administrators
        settings (Union['UserSettings', None, Unset]):
        groups (Union[Unset, List[str]]): Groups the user belongs to, only editable by administrators
    """

    name: str
    email: str
    phone: Union[Unset, str] = UNSET
    department: Union[Unset, str] = UNSET
    job_title: Union[Unset, str] = UNSET
    organization: Union[Unset, str] = UNSET
    settings: Union["UserSettings", None, Unset] = UNSET
    groups: Union[Unset, List[str]] = UNSET
    # Payload keys not covered by the declared fields above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict; unset optional fields are omitted."""
        from ..models.user_settings import UserSettings

        name = self.name

        email = self.email

        phone = self.phone

        department = self.department

        job_title = self.job_title

        organization = self.organization

        # ``settings`` is tri-state: UNSET (absent), None (explicit null), or a model.
        settings: Union[Dict[str, Any], None, Unset]
        if isinstance(self.settings, Unset):
            settings = UNSET
        elif isinstance(self.settings, UserSettings):
            settings = self.settings.to_dict()
        else:
            settings = self.settings

        groups: Union[Unset, List[str]] = UNSET
        if not isinstance(self.groups, Unset):
            groups = self.groups

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "name": name,
                "email": email,
            }
        )
        if phone is not UNSET:
            field_dict["phone"] = phone
        if department is not UNSET:
            field_dict["department"] = department
        if job_title is not UNSET:
            field_dict["jobTitle"] = job_title
        if organization is not UNSET:
            field_dict["organization"] = organization
        if settings is not UNSET:
            field_dict["settings"] = settings
        if groups is not UNSET:
            field_dict["groups"] = groups

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize an UpdateUserRequest; unrecognized keys are preserved."""
        from ..models.user_settings import UserSettings

        d = src_dict.copy()
        name = d.pop("name")

        email = d.pop("email")

        phone = d.pop("phone", UNSET)

        department = d.pop("department", UNSET)

        job_title = d.pop("jobTitle", UNSET)

        organization = d.pop("organization", UNSET)

        def _parse_settings(data: object) -> Union["UserSettings", None, Unset]:
            # Tri-state input: None and UNSET pass through unchanged.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, dict):
                    raise TypeError()
                settings_type_1 = UserSettings.from_dict(data)

                return settings_type_1
            # Fix: previously a bare ``except:``, which also swallowed
            # BaseException (KeyboardInterrupt/SystemExit). Only ordinary
            # parse failures should fall through to the raw-value fallback.
            except Exception:
                pass
            return cast(Union["UserSettings", None, Unset], data)

        settings = _parse_settings(d.pop("settings", UNSET))

        groups = cast(List[str], d.pop("groups", UNSET))

        update_user_request = cls(
            name=name,
            email=email,
            phone=phone,
            department=department,
            job_title=job_title,
            organization=organization,
            settings=settings,
            groups=groups,
        )

        update_user_request.additional_properties = d
        return update_user_request

    @property
    def additional_keys(self) -> List[str]:
        """Names of payload keys beyond the declared attributes."""
        return list(self.additional_properties.keys())
Attributes:
- name (str): Display name of the user
- email (str): Email address of the user
- phone (Union[Unset, str]): Phone number of the user
- department (Union[Unset, str]): Department or lab the user belongs to
- job_title (Union[Unset, str]): Job title or role of the user
- organization (Union[Unset, str]): The organization the user belongs to, only editable by administrators
- settings (Union['UserSettings', None, Unset]):
- groups (Union[Unset, List[str]]): Groups the user belongs to, only editable by administrators
# attrs-generated initializer for UpdateUserRequest (rendered documentation
# listing). ``attr_dict`` maps field names to their attrs definitions;
# optional parameters default to the declared field defaults (UNSET).
def __init__(self, name, email, phone=attr_dict['phone'].default, department=attr_dict['department'].default, job_title=attr_dict['job_title'].default, organization=attr_dict['organization'].default, settings=attr_dict['settings'].default, groups=attr_dict['groups'].default):
    self.name = name
    self.email = email
    self.phone = phone
    self.department = department
    self.job_title = job_title
    self.organization = organization
    self.settings = settings
    self.groups = groups
    # Fresh dict per instance, from the field's ``factory=dict``.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class UpdateUserRequest.
40 def to_dict(self) -> Dict[str, Any]: 41 from ..models.user_settings import UserSettings 42 43 name = self.name 44 45 email = self.email 46 47 phone = self.phone 48 49 department = self.department 50 51 job_title = self.job_title 52 53 organization = self.organization 54 55 settings: Union[Dict[str, Any], None, Unset] 56 if isinstance(self.settings, Unset): 57 settings = UNSET 58 elif isinstance(self.settings, UserSettings): 59 settings = self.settings.to_dict() 60 else: 61 settings = self.settings 62 63 groups: Union[Unset, List[str]] = UNSET 64 if not isinstance(self.groups, Unset): 65 groups = self.groups 66 67 field_dict: Dict[str, Any] = {} 68 field_dict.update(self.additional_properties) 69 field_dict.update( 70 { 71 "name": name, 72 "email": email, 73 } 74 ) 75 if phone is not UNSET: 76 field_dict["phone"] = phone 77 if department is not UNSET: 78 field_dict["department"] = department 79 if job_title is not UNSET: 80 field_dict["jobTitle"] = job_title 81 if organization is not UNSET: 82 field_dict["organization"] = organization 83 if settings is not UNSET: 84 field_dict["settings"] = settings 85 if groups is not UNSET: 86 field_dict["groups"] = groups 87 88 return field_dict
90 @classmethod 91 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 92 from ..models.user_settings import UserSettings 93 94 d = src_dict.copy() 95 name = d.pop("name") 96 97 email = d.pop("email") 98 99 phone = d.pop("phone", UNSET) 100 101 department = d.pop("department", UNSET) 102 103 job_title = d.pop("jobTitle", UNSET) 104 105 organization = d.pop("organization", UNSET) 106 107 def _parse_settings(data: object) -> Union["UserSettings", None, Unset]: 108 if data is None: 109 return data 110 if isinstance(data, Unset): 111 return data 112 try: 113 if not isinstance(data, dict): 114 raise TypeError() 115 settings_type_1 = UserSettings.from_dict(data) 116 117 return settings_type_1 118 except: # noqa: E722 119 pass 120 return cast(Union["UserSettings", None, Unset], data) 121 122 settings = _parse_settings(d.pop("settings", UNSET)) 123 124 groups = cast(List[str], d.pop("groups", UNSET)) 125 126 update_user_request = cls( 127 name=name, 128 email=email, 129 phone=phone, 130 department=department, 131 job_title=job_title, 132 organization=organization, 133 settings=settings, 134 groups=groups, 135 ) 136 137 update_user_request.additional_properties = d 138 return update_user_request
@_attrs_define
class UploadDatasetCreateResponse:
    """Response returned after requesting an upload-dataset creation.

    Attributes:
        id (str):
        message (str):
        upload_path (str):
        bucket (str):
    """

    id: str
    message: str
    upload_path: str
    bucket: str
    # Payload keys not covered by the declared fields above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict; every field is required, so all are emitted."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "id": self.id,
                "message": self.message,
                "uploadPath": self.upload_path,
                "bucket": self.bucket,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an UploadDatasetCreateResponse from its dict form."""
        data = src_dict.copy()
        response = cls(
            id=data.pop("id"),
            message=data.pop("message"),
            upload_path=data.pop("uploadPath"),
            bucket=data.pop("bucket"),
        )
        response.additional_properties = data
        return response

    @property
    def additional_keys(self) -> List[str]:
        """Names of payload keys beyond the declared attributes."""
        return [*self.additional_properties]
Attributes:
- id (str):
- message (str):
- upload_path (str):
- bucket (str):
# attrs-generated initializer for UploadDatasetCreateResponse (rendered
# documentation listing). All fields are required, so no defaults appear.
def __init__(self, id, message, upload_path, bucket):
    self.id = id
    self.message = message
    self.upload_path = upload_path
    self.bucket = bucket
    # Fresh dict per instance, from the field's ``factory=dict``.
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class UploadDatasetCreateResponse.
26 def to_dict(self) -> Dict[str, Any]: 27 id = self.id 28 29 message = self.message 30 31 upload_path = self.upload_path 32 33 bucket = self.bucket 34 35 field_dict: Dict[str, Any] = {} 36 field_dict.update(self.additional_properties) 37 field_dict.update( 38 { 39 "id": id, 40 "message": message, 41 "uploadPath": upload_path, 42 "bucket": bucket, 43 } 44 ) 45 46 return field_dict
48 @classmethod 49 def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: 50 d = src_dict.copy() 51 id = d.pop("id") 52 53 message = d.pop("message") 54 55 upload_path = d.pop("uploadPath") 56 57 bucket = d.pop("bucket") 58 59 upload_dataset_create_response = cls( 60 id=id, 61 message=message, 62 upload_path=upload_path, 63 bucket=bucket, 64 ) 65 66 upload_dataset_create_response.additional_properties = d 67 return upload_dataset_create_response
@_attrs_define
class UploadDatasetRequest:
    """Request payload for creating a dataset via direct upload.

    Attributes:
        name (str): Name of the dataset
        process_id (str): ID of the ingest process Example: paired_dnaseq.
        expected_files (List[str]):
        description (Union[Unset, str]): Description of the dataset
        tags (Union[List['Tag'], None, Unset]): List of tags to apply to the dataset
    """

    name: str
    process_id: str
    expected_files: List[str]
    description: Union[Unset, str] = UNSET
    tags: Union[List["Tag"], None, Unset] = UNSET
    # Payload keys not covered by the declared fields above.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict; unset optional fields are omitted."""
        name = self.name

        process_id = self.process_id

        expected_files = self.expected_files

        description = self.description

        # ``tags`` is tri-state: UNSET (absent), None (explicit null), or a list.
        tags: Union[List[Dict[str, Any]], None, Unset]
        if isinstance(self.tags, Unset):
            tags = UNSET
        elif isinstance(self.tags, list):
            tags = []
            for tags_type_0_item_data in self.tags:
                tags_type_0_item = tags_type_0_item_data.to_dict()
                tags.append(tags_type_0_item)

        else:
            tags = self.tags

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "name": name,
                "processId": process_id,
                "expectedFiles": expected_files,
            }
        )
        if description is not UNSET:
            field_dict["description"] = description
        if tags is not UNSET:
            field_dict["tags"] = tags

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize an UploadDatasetRequest; unrecognized keys are preserved."""
        from ..models.tag import Tag

        d = src_dict.copy()
        name = d.pop("name")

        process_id = d.pop("processId")

        expected_files = cast(List[str], d.pop("expectedFiles"))

        description = d.pop("description", UNSET)

        def _parse_tags(data: object) -> Union[List["Tag"], None, Unset]:
            # Tri-state input: None and UNSET pass through unchanged.
            if data is None:
                return data
            if isinstance(data, Unset):
                return data
            try:
                if not isinstance(data, list):
                    raise TypeError()
                tags_type_0 = []
                _tags_type_0 = data
                for tags_type_0_item_data in _tags_type_0:
                    tags_type_0_item = Tag.from_dict(tags_type_0_item_data)

                    tags_type_0.append(tags_type_0_item)

                return tags_type_0
            # Fix: previously a bare ``except:``, which also swallowed
            # BaseException (KeyboardInterrupt/SystemExit). Only ordinary
            # parse failures should fall through to the raw-value fallback.
            except Exception:
                pass
            return cast(Union[List["Tag"], None, Unset], data)

        tags = _parse_tags(d.pop("tags", UNSET))

        upload_dataset_request = cls(
            name=name,
            process_id=process_id,
            expected_files=expected_files,
            description=description,
            tags=tags,
        )

        upload_dataset_request.additional_properties = d
        return upload_dataset_request

    @property
    def additional_keys(self) -> List[str]:
        """Names of payload keys beyond the declared attributes."""
        return list(self.additional_properties.keys())
Attributes:
- name (str): Name of the dataset
- process_id (str): ID of the ingest process. Example: paired_dnaseq.
- expected_files (List[str]):
- description (Union[Unset, str]): Description of the dataset
- tags (Union[List['Tag'], None, Unset]): List of tags to apply to the dataset
def __init__(self, name, process_id, expected_files, description=attr_dict['description'].default, tags=attr_dict['tags'].default):
    # Initializer generated by attrs for UploadDatasetRequest (as rendered by
    # the doc generator). ``attr_dict`` and ``__attr_factory_additional_properties``
    # are attrs internals bound when the class body is compiled.
    self.name = name
    self.process_id = process_id
    self.expected_files = expected_files
    self.description = description
    self.tags = tags
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class UploadDatasetRequest.
def to_dict(self) -> Dict[str, Any]:
    """Serialize this UploadDatasetRequest to the API's camelCase dictionary form."""
    # UNSET passes through untouched; a tag list is serialized item by item.
    if isinstance(self.tags, Unset):
        tags: Union[List[Dict[str, Any]], None, Unset] = UNSET
    elif isinstance(self.tags, list):
        tags = [tag.to_dict() for tag in self.tags]
    else:
        tags = self.tags

    # Extras go in first so the declared fields win on key collisions.
    field_dict: Dict[str, Any] = {
        **self.additional_properties,
        "name": self.name,
        "processId": self.process_id,
        "expectedFiles": self.expected_files,
    }
    if self.description is not UNSET:
        field_dict["description"] = self.description
    if tags is not UNSET:
        field_dict["tags"] = tags
    return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize an UploadDatasetRequest from the API's camelCase dictionary form."""
    from ..models.tag import Tag

    data = src_dict.copy()

    def _parse_tags(raw: object) -> Union[List["Tag"], None, Unset]:
        # None / UNSET pass straight through; a list is decoded per item,
        # and anything else (or a failed decode) is returned unchanged.
        if raw is None or isinstance(raw, Unset):
            return raw
        if isinstance(raw, list):
            try:
                return [Tag.from_dict(item) for item in raw]
            except:  # noqa: E722
                pass
        return cast(Union[List["Tag"], None, Unset], raw)

    request = cls(
        name=data.pop("name"),
        process_id=data.pop("processId"),
        expected_files=cast(List[str], data.pop("expectedFiles")),
        description=data.pop("description", UNSET),
        tags=_parse_tags(data.pop("tags", UNSET)),
    )
    request.additional_properties = data
    return request
@_attrs_define
class User:
    """
    Attributes:
        name (str):
        username (str):
        organization (str):
        department (str):
    """

    name: str
    username: str
    organization: str
    department: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to the API's dictionary form."""
        # Extras go in first so the declared fields win on key collisions.
        return {
            **self.additional_properties,
            "name": self.name,
            "username": self.username,
            "organization": self.organization,
            "department": self.department,
        }

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from the API's dictionary form; leftover keys become extras."""
        data = src_dict.copy()
        user = cls(
            name=data.pop("name"),
            username=data.pop("username"),
            organization=data.pop("organization"),
            department=data.pop("department"),
        )
        user.additional_properties = data
        return user

    @property
    def additional_keys(self) -> List[str]:
        """Keys present in the payload beyond the declared attributes."""
        return list(self.additional_properties.keys())
Attributes:
- name (str):
- username (str):
- organization (str):
- department (str):
def __init__(self, name, username, organization, department):
    # Initializer generated by attrs for User; ``__attr_factory_additional_properties``
    # is the attrs-supplied factory for the extras dict.
    self.name = name
    self.username = username
    self.organization = organization
    self.department = department
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class User.
def to_dict(self) -> Dict[str, Any]:
    """Serialize this User to the API's dictionary form."""
    # Extras go in first so the declared fields win on key collisions.
    return {
        **self.additional_properties,
        "name": self.name,
        "username": self.username,
        "organization": self.organization,
        "department": self.department,
    }
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize a User from the API's dictionary form; leftover keys become extras."""
    data = src_dict.copy()
    user = cls(
        name=data.pop("name"),
        username=data.pop("username"),
        organization=data.pop("organization"),
        department=data.pop("department"),
    )
    user.additional_properties = data
    return user
@_attrs_define
class UserDetail:
    """
    Attributes:
        username (str):
        name (str):
        phone (str):
        email (str):
        organization (str):
        job_title (str):
        department (str):
        invited_by (str):
        project_assignments (List['UserProjectAssignment']):
        groups (List[str]):
        settings (UserSettings): Additional settings for the user
        sign_up_time (Union[None, Unset, datetime.datetime]):
        last_signed_in (Union[None, Unset, datetime.datetime]):
    """

    username: str
    name: str
    phone: str
    email: str
    organization: str
    job_title: str
    department: str
    invited_by: str
    project_assignments: List["UserProjectAssignment"]
    groups: List[str]
    settings: "UserSettings"
    sign_up_time: Union[None, Unset, datetime.datetime] = UNSET
    last_signed_in: Union[None, Unset, datetime.datetime] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to the API's camelCase dictionary form."""

        def _iso(value: Union[None, Unset, datetime.datetime]) -> Union[None, Unset, str]:
            # UNSET passes through untouched; datetimes become ISO-8601 text.
            if isinstance(value, Unset):
                return UNSET
            if isinstance(value, datetime.datetime):
                return value.isoformat()
            return value

        sign_up_time = _iso(self.sign_up_time)
        last_signed_in = _iso(self.last_signed_in)

        # Extras go in first so the declared fields win on key collisions.
        field_dict: Dict[str, Any] = {
            **self.additional_properties,
            "username": self.username,
            "name": self.name,
            "phone": self.phone,
            "email": self.email,
            "organization": self.organization,
            "jobTitle": self.job_title,
            "department": self.department,
            "invitedBy": self.invited_by,
            "projectAssignments": [item.to_dict() for item in self.project_assignments],
            "groups": self.groups,
            "settings": self.settings.to_dict(),
        }
        if sign_up_time is not UNSET:
            field_dict["signUpTime"] = sign_up_time
        if last_signed_in is not UNSET:
            field_dict["lastSignedIn"] = last_signed_in
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from the API's camelCase dictionary form."""
        from ..models.user_project_assignment import UserProjectAssignment
        from ..models.user_settings import UserSettings

        data = src_dict.copy()

        def _parse_datetime(raw: object) -> Union[None, Unset, datetime.datetime]:
            # None / UNSET pass through; ISO strings are parsed, and anything
            # unparseable is returned unchanged.
            if raw is None or isinstance(raw, Unset):
                return raw
            if isinstance(raw, str):
                try:
                    return isoparse(raw)
                except:  # noqa: E722
                    pass
            return cast(Union[None, Unset, datetime.datetime], raw)

        user_detail = cls(
            username=data.pop("username"),
            name=data.pop("name"),
            phone=data.pop("phone"),
            email=data.pop("email"),
            organization=data.pop("organization"),
            job_title=data.pop("jobTitle"),
            department=data.pop("department"),
            invited_by=data.pop("invitedBy"),
            project_assignments=[
                UserProjectAssignment.from_dict(item) for item in data.pop("projectAssignments")
            ],
            groups=cast(List[str], data.pop("groups")),
            settings=UserSettings.from_dict(data.pop("settings")),
            sign_up_time=_parse_datetime(data.pop("signUpTime", UNSET)),
            last_signed_in=_parse_datetime(data.pop("lastSignedIn", UNSET)),
        )
        user_detail.additional_properties = data
        return user_detail

    @property
    def additional_keys(self) -> List[str]:
        """Keys present in the payload beyond the declared attributes."""
        return list(self.additional_properties.keys())
Attributes:
- username (str):
- name (str):
- phone (str):
- email (str):
- organization (str):
- job_title (str):
- department (str):
- invited_by (str):
- project_assignments (List['UserProjectAssignment']):
- groups (List[str]):
- settings (UserSettings): Additional settings for the user
- sign_up_time (Union[None, Unset, datetime.datetime]):
- last_signed_in (Union[None, Unset, datetime.datetime]):
def __init__(self, username, name, phone, email, organization, job_title, department, invited_by, project_assignments, groups, settings, sign_up_time=attr_dict['sign_up_time'].default, last_signed_in=attr_dict['last_signed_in'].default):
    # Initializer generated by attrs for UserDetail; ``attr_dict`` holds the
    # attrs field definitions, supplying the declared defaults for the two
    # optional timestamps.
    self.username = username
    self.name = name
    self.phone = phone
    self.email = email
    self.organization = organization
    self.job_title = job_title
    self.department = department
    self.invited_by = invited_by
    self.project_assignments = project_assignments
    self.groups = groups
    self.settings = settings
    self.sign_up_time = sign_up_time
    self.last_signed_in = last_signed_in
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class UserDetail.
def to_dict(self) -> Dict[str, Any]:
    """Serialize this UserDetail to the API's camelCase dictionary form."""

    def _iso(value: Union[None, Unset, datetime.datetime]) -> Union[None, Unset, str]:
        # UNSET passes through untouched; datetimes become ISO-8601 text.
        if isinstance(value, Unset):
            return UNSET
        if isinstance(value, datetime.datetime):
            return value.isoformat()
        return value

    sign_up_time = _iso(self.sign_up_time)
    last_signed_in = _iso(self.last_signed_in)

    # Extras go in first so the declared fields win on key collisions.
    field_dict: Dict[str, Any] = {
        **self.additional_properties,
        "username": self.username,
        "name": self.name,
        "phone": self.phone,
        "email": self.email,
        "organization": self.organization,
        "jobTitle": self.job_title,
        "department": self.department,
        "invitedBy": self.invited_by,
        "projectAssignments": [item.to_dict() for item in self.project_assignments],
        "groups": self.groups,
        "settings": self.settings.to_dict(),
    }
    if sign_up_time is not UNSET:
        field_dict["signUpTime"] = sign_up_time
    if last_signed_in is not UNSET:
        field_dict["lastSignedIn"] = last_signed_in
    return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize a UserDetail from the API's camelCase dictionary form."""
    from ..models.user_project_assignment import UserProjectAssignment
    from ..models.user_settings import UserSettings

    data = src_dict.copy()

    def _parse_datetime(raw: object) -> Union[None, Unset, datetime.datetime]:
        # None / UNSET pass through; ISO strings are parsed, and anything
        # unparseable is returned unchanged.
        if raw is None or isinstance(raw, Unset):
            return raw
        if isinstance(raw, str):
            try:
                return isoparse(raw)
            except:  # noqa: E722
                pass
        return cast(Union[None, Unset, datetime.datetime], raw)

    user_detail = cls(
        username=data.pop("username"),
        name=data.pop("name"),
        phone=data.pop("phone"),
        email=data.pop("email"),
        organization=data.pop("organization"),
        job_title=data.pop("jobTitle"),
        department=data.pop("department"),
        invited_by=data.pop("invitedBy"),
        project_assignments=[
            UserProjectAssignment.from_dict(item) for item in data.pop("projectAssignments")
        ],
        groups=cast(List[str], data.pop("groups")),
        settings=UserSettings.from_dict(data.pop("settings")),
        sign_up_time=_parse_datetime(data.pop("signUpTime", UNSET)),
        last_signed_in=_parse_datetime(data.pop("lastSignedIn", UNSET)),
    )
    user_detail.additional_properties = data
    return user_detail
@_attrs_define
class UserProjectAssignment:
    """
    Attributes:
        project_id (str):
        role (ProjectRole):
        created_by (str):
        created_at (Union[None, Unset, datetime.datetime]):
    """

    project_id: str
    role: ProjectRole
    created_by: str
    created_at: Union[None, Unset, datetime.datetime] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to the API's camelCase dictionary form."""
        # UNSET passes through untouched; a datetime becomes ISO-8601 text.
        if isinstance(self.created_at, Unset):
            created_at: Union[None, Unset, str] = UNSET
        elif isinstance(self.created_at, datetime.datetime):
            created_at = self.created_at.isoformat()
        else:
            created_at = self.created_at

        # Extras go in first so the declared fields win on key collisions.
        field_dict: Dict[str, Any] = {
            **self.additional_properties,
            "projectId": self.project_id,
            "role": self.role.value,
            "createdBy": self.created_by,
        }
        if created_at is not UNSET:
            field_dict["createdAt"] = created_at
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from the API's camelCase dictionary form."""
        data = src_dict.copy()

        def _parse_created_at(raw: object) -> Union[None, Unset, datetime.datetime]:
            # None / UNSET pass through; ISO strings are parsed, and anything
            # unparseable is returned unchanged.
            if raw is None or isinstance(raw, Unset):
                return raw
            if isinstance(raw, str):
                try:
                    return isoparse(raw)
                except:  # noqa: E722
                    pass
            return cast(Union[None, Unset, datetime.datetime], raw)

        assignment = cls(
            project_id=data.pop("projectId"),
            role=ProjectRole(data.pop("role")),
            created_by=data.pop("createdBy"),
            created_at=_parse_created_at(data.pop("createdAt", UNSET)),
        )
        assignment.additional_properties = data
        return assignment

    @property
    def additional_keys(self) -> List[str]:
        """Keys present in the payload beyond the declared attributes."""
        return list(self.additional_properties.keys())
Attributes:
- project_id (str):
- role (ProjectRole):
- created_by (str):
- created_at (Union[None, Unset, datetime.datetime]):
def __init__(self, project_id, role, created_by, created_at=attr_dict['created_at'].default):
    # Initializer generated by attrs for UserProjectAssignment; ``attr_dict``
    # supplies the declared default for the optional ``created_at``.
    self.project_id = project_id
    self.role = role
    self.created_by = created_by
    self.created_at = created_at
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class UserProjectAssignment.
def to_dict(self) -> Dict[str, Any]:
    """Serialize this UserProjectAssignment to the API's camelCase dictionary form."""
    # UNSET passes through untouched; a datetime becomes ISO-8601 text.
    if isinstance(self.created_at, Unset):
        created_at: Union[None, Unset, str] = UNSET
    elif isinstance(self.created_at, datetime.datetime):
        created_at = self.created_at.isoformat()
    else:
        created_at = self.created_at

    # Extras go in first so the declared fields win on key collisions.
    field_dict: Dict[str, Any] = {
        **self.additional_properties,
        "projectId": self.project_id,
        "role": self.role.value,
        "createdBy": self.created_by,
    }
    if created_at is not UNSET:
        field_dict["createdAt"] = created_at
    return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize a UserProjectAssignment from the API's camelCase dictionary form."""
    data = src_dict.copy()

    def _parse_created_at(raw: object) -> Union[None, Unset, datetime.datetime]:
        # None / UNSET pass through; ISO strings are parsed, and anything
        # unparseable is returned unchanged.
        if raw is None or isinstance(raw, Unset):
            return raw
        if isinstance(raw, str):
            try:
                return isoparse(raw)
            except:  # noqa: E722
                pass
        return cast(Union[None, Unset, datetime.datetime], raw)

    assignment = cls(
        project_id=data.pop("projectId"),
        role=ProjectRole(data.pop("role")),
        created_by=data.pop("createdBy"),
        created_at=_parse_created_at(data.pop("createdAt", UNSET)),
    )
    assignment.additional_properties = data
    return assignment
@_attrs_define
class UserSettings:
    """Additional settings for the user

    Attributes:
        analysis_update_notifications_enabled (bool):
    """

    analysis_update_notifications_enabled: bool
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to the API's camelCase dictionary form."""
        # Extras go in first so the declared field wins on a key collision.
        return {
            **self.additional_properties,
            "analysisUpdateNotificationsEnabled": self.analysis_update_notifications_enabled,
        }

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from the API's camelCase dictionary form."""
        data = src_dict.copy()
        settings = cls(
            analysis_update_notifications_enabled=data.pop("analysisUpdateNotificationsEnabled"),
        )
        settings.additional_properties = data
        return settings

    @property
    def additional_keys(self) -> List[str]:
        """Keys present in the payload beyond the declared attributes."""
        return list(self.additional_properties.keys())
Additional settings for the user
Attributes:
- analysis_update_notifications_enabled (bool):
def __init__(self, analysis_update_notifications_enabled):
    # Initializer generated by attrs for UserSettings;
    # ``__attr_factory_additional_properties`` is the attrs-supplied factory
    # for the extras dict.
    self.analysis_update_notifications_enabled = analysis_update_notifications_enabled
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class UserSettings.
def to_dict(self) -> Dict[str, Any]:
    """Serialize this UserSettings to the API's camelCase dictionary form."""
    # Extras go in first so the declared field wins on a key collision.
    return {
        **self.additional_properties,
        "analysisUpdateNotificationsEnabled": self.analysis_update_notifications_enabled,
    }
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize a UserSettings from the API's camelCase dictionary form."""
    data = src_dict.copy()
    settings = cls(
        analysis_update_notifications_enabled=data.pop("analysisUpdateNotificationsEnabled"),
    )
    settings.additional_properties = data
    return settings
@_attrs_define
class ValidateFileNamePatternsRequest:
    """
    Attributes:
        file_names (List[str]):
        file_name_patterns (List[str]):
    """

    file_names: List[str]
    file_name_patterns: List[str]
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to the API's camelCase dictionary form."""
        # Extras go in first so the declared fields win on key collisions.
        return {
            **self.additional_properties,
            "fileNames": self.file_names,
            "fileNamePatterns": self.file_name_patterns,
        }

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from the API's camelCase dictionary form."""
        data = src_dict.copy()
        request = cls(
            file_names=cast(List[str], data.pop("fileNames")),
            file_name_patterns=cast(List[str], data.pop("fileNamePatterns")),
        )
        request.additional_properties = data
        return request

    @property
    def additional_keys(self) -> List[str]:
        """Keys present in the payload beyond the declared attributes."""
        return list(self.additional_properties.keys())
Attributes:
- file_names (List[str]):
- file_name_patterns (List[str]):
def __init__(self, file_names, file_name_patterns):
    # Initializer generated by attrs for ValidateFileNamePatternsRequest;
    # ``__attr_factory_additional_properties`` is the attrs-supplied factory
    # for the extras dict.
    self.file_names = file_names
    self.file_name_patterns = file_name_patterns
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ValidateFileNamePatternsRequest.
def to_dict(self) -> Dict[str, Any]:
    """Serialize this ValidateFileNamePatternsRequest to the API's camelCase dictionary form."""
    # Extras go in first so the declared fields win on key collisions.
    return {
        **self.additional_properties,
        "fileNames": self.file_names,
        "fileNamePatterns": self.file_name_patterns,
    }
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize a ValidateFileNamePatternsRequest from the API's camelCase dictionary form."""
    data = src_dict.copy()
    request = cls(
        file_names=cast(List[str], data.pop("fileNames")),
        file_name_patterns=cast(List[str], data.pop("fileNamePatterns")),
    )
    request.additional_properties = data
    return request
@_attrs_define
class ValidateFileRequirementsRequest:
    """
    Attributes:
        file_names (List[str]):
        sample_sheet (str):
    """

    file_names: List[str]
    sample_sheet: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to the API's camelCase dictionary form."""
        # Extras go in first so the declared fields win on key collisions.
        return {
            **self.additional_properties,
            "fileNames": self.file_names,
            "sampleSheet": self.sample_sheet,
        }

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Deserialize from the API's camelCase dictionary form."""
        data = src_dict.copy()
        request = cls(
            file_names=cast(List[str], data.pop("fileNames")),
            sample_sheet=data.pop("sampleSheet"),
        )
        request.additional_properties = data
        return request

    @property
    def additional_keys(self) -> List[str]:
        """Keys present in the payload beyond the declared attributes."""
        return list(self.additional_properties.keys())
Attributes:
- file_names (List[str]):
- sample_sheet (str):
def __init__(self, file_names, sample_sheet):
    # Initializer generated by attrs for ValidateFileRequirementsRequest;
    # ``__attr_factory_additional_properties`` is the attrs-supplied factory
    # for the extras dict.
    self.file_names = file_names
    self.sample_sheet = sample_sheet
    self.additional_properties = __attr_factory_additional_properties()
Method generated by attrs for class ValidateFileRequirementsRequest.
def to_dict(self) -> Dict[str, Any]:
    """Serialize this ValidateFileRequirementsRequest to the API's camelCase dictionary form."""
    # Extras go in first so the declared fields win on key collisions.
    return {
        **self.additional_properties,
        "fileNames": self.file_names,
        "sampleSheet": self.sample_sheet,
    }
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
    """Deserialize a ValidateFileRequirementsRequest from the API's camelCase dictionary form."""
    data = src_dict.copy()
    request = cls(
        file_names=cast(List[str], data.pop("fileNames")),
        sample_sheet=data.pop("sampleSheet"),
    )
    request.additional_properties = data
    return request