aboutsummaryrefslogtreecommitdiff
path: root/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run.py
blob: 903da30f50257a314084c618164d4e7ab751de7f (about) (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
# coding: utf-8

"""
    Hatchet API

    The Hatchet API

    The version of the OpenAPI document: 1.0.0
    Generated by OpenAPI Generator (https://openapi-generator.tech)

    Do not edit the class manually.
"""  # noqa: E501


from __future__ import annotations

import json
import pprint
import re  # noqa: F401
from datetime import datetime
from typing import Any, ClassVar, Dict, List, Optional, Set

from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
from typing_extensions import Annotated, Self

from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
from hatchet_sdk.clients.rest.models.workflow_run_status import WorkflowRunStatus
from hatchet_sdk.clients.rest.models.workflow_run_triggered_by import (
    WorkflowRunTriggeredBy,
)
from hatchet_sdk.clients.rest.models.workflow_version import WorkflowVersion


class WorkflowRun(BaseModel):
    """
    WorkflowRun

    Auto-generated pydantic model for a single workflow run returned by the
    Hatchet REST API.  Attribute names are snake_case on the Python side;
    each maps to a camelCase wire-format key through its Field alias, and
    ``populate_by_name=True`` lets callers supply either form on input.
    """  # noqa: E501

    # Standard resource metadata envelope (required by the API).
    metadata: APIResourceMeta
    tenant_id: StrictStr = Field(alias="tenantId")
    workflow_version_id: StrictStr = Field(alias="workflowVersionId")
    # Optional expanded workflow-version object accompanying workflowVersionId.
    workflow_version: Optional[WorkflowVersion] = Field(
        default=None, alias="workflowVersion"
    )
    status: WorkflowRunStatus
    display_name: Optional[StrictStr] = Field(default=None, alias="displayName")
    # NOTE: JobRun is a forward reference here -- it is imported at the very
    # bottom of this module (after the class body) and resolved by the
    # model_rebuild() call there, to work around a circular import.
    job_runs: Optional[List[JobRun]] = Field(default=None, alias="jobRuns")
    triggered_by: WorkflowRunTriggeredBy = Field(alias="triggeredBy")
    # Arbitrary JSON input payload the run was started with.
    input: Optional[Dict[str, Any]] = None
    error: Optional[StrictStr] = None
    started_at: Optional[datetime] = Field(default=None, alias="startedAt")
    finished_at: Optional[datetime] = Field(default=None, alias="finishedAt")
    duration: Optional[StrictInt] = None
    # Exactly 36 characters (UUID-sized -- presumably a UUID string; the
    # generator only encodes the length constraint, not the format).
    parent_id: Optional[
        Annotated[str, Field(min_length=36, strict=True, max_length=36)]
    ] = Field(default=None, alias="parentId")
    parent_step_run_id: Optional[
        Annotated[str, Field(min_length=36, strict=True, max_length=36)]
    ] = Field(default=None, alias="parentStepRunId")
    additional_metadata: Optional[Dict[str, Any]] = Field(
        default=None, alias="additionalMetadata"
    )
    # Wire-format (alias) names of every declared property, in API order.
    __properties: ClassVar[List[str]] = [
        "metadata",
        "tenantId",
        "workflowVersionId",
        "workflowVersion",
        "status",
        "displayName",
        "jobRuns",
        "triggeredBy",
        "input",
        "error",
        "startedAt",
        "finishedAt",
        "duration",
        "parentId",
        "parentStepRunId",
        "additionalMetadata",
    ]

    model_config = ConfigDict(
        # Accept snake_case attribute names as well as the camelCase aliases.
        populate_by_name=True,
        # Re-validate fields on attribute assignment, not just construction.
        validate_assignment=True,
        # Allow field names starting with "model_" without pydantic warnings.
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of WorkflowRun from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * Every field whose value is `None` is omitted from the output
          (`exclude_none=True`), and
        * nested models (`metadata`, `workflowVersion`, each `jobRuns` item,
          `triggeredBy`) are serialized via their own `to_dict()` so this
          alias/None handling applies recursively.
        """
        # No fields are excluded for this model; kept for generator symmetry.
        excluded_fields: Set[str] = set([])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of metadata
        if self.metadata:
            _dict["metadata"] = self.metadata.to_dict()
        # override the default output from pydantic by calling `to_dict()` of workflow_version
        if self.workflow_version:
            _dict["workflowVersion"] = self.workflow_version.to_dict()
        # override the default output from pydantic by calling `to_dict()` of each item in job_runs (list)
        # (an empty job_runs list is falsy, so the `[]` already produced by
        # model_dump above is left as-is in that case)
        _items = []
        if self.job_runs:
            for _item_job_runs in self.job_runs:
                if _item_job_runs:
                    _items.append(_item_job_runs.to_dict())
            _dict["jobRuns"] = _items
        # override the default output from pydantic by calling `to_dict()` of triggered_by
        if self.triggered_by:
            _dict["triggeredBy"] = self.triggered_by.to_dict()
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of WorkflowRun from a dict.

        Returns None when `obj` is None.  Non-dict inputs (e.g. an existing
        model instance) are passed straight to `model_validate`.  Nested
        objects are built through their own `from_dict()` so each model's
        alias handling is applied recursively.
        """
        if obj is None:
            return None

        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        # Keys below are the camelCase wire-format names; populate_by_name
        # still permits snake_case, but the generator emits aliases here.
        _obj = cls.model_validate(
            {
                "metadata": (
                    APIResourceMeta.from_dict(obj["metadata"])
                    if obj.get("metadata") is not None
                    else None
                ),
                "tenantId": obj.get("tenantId"),
                "workflowVersionId": obj.get("workflowVersionId"),
                "workflowVersion": (
                    WorkflowVersion.from_dict(obj["workflowVersion"])
                    if obj.get("workflowVersion") is not None
                    else None
                ),
                "status": obj.get("status"),
                "displayName": obj.get("displayName"),
                "jobRuns": (
                    [JobRun.from_dict(_item) for _item in obj["jobRuns"]]
                    if obj.get("jobRuns") is not None
                    else None
                ),
                "triggeredBy": (
                    WorkflowRunTriggeredBy.from_dict(obj["triggeredBy"])
                    if obj.get("triggeredBy") is not None
                    else None
                ),
                "input": obj.get("input"),
                "error": obj.get("error"),
                "startedAt": obj.get("startedAt"),
                "finishedAt": obj.get("finishedAt"),
                "duration": obj.get("duration"),
                "parentId": obj.get("parentId"),
                "parentStepRunId": obj.get("parentStepRunId"),
                "additionalMetadata": obj.get("additionalMetadata"),
            }
        )
        return _obj


# Deliberately imported AFTER the class body: JobRun is referenced as a
# forward annotation in WorkflowRun.job_runs, and the bottom-of-file
# placement avoids a circular import (job_run presumably imports this
# module in turn -- verify against job_run.py).
from hatchet_sdk.clients.rest.models.job_run import JobRun

# TODO: Rewrite to not use raise_errors
# Resolve the `JobRun` forward reference now that the name is in scope;
# raise_errors=False suppresses errors for any still-unresolved references.
WorkflowRun.model_rebuild(raise_errors=False)