Diffstat (limited to '.venv/lib/python3.12/site-packages/hatchet_sdk')
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/__init__.py247
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/client.py119
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/admin.py542
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/dispatcher/action_listener.py423
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/dispatcher/dispatcher.py204
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/event_ts.py28
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/events.py183
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/__init__.py293
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/__init__.py19
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/api_token_api.py858
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/default_api.py2257
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/event_api.py2548
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/github_api.py331
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/healthcheck_api.py483
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/log_api.py447
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/metadata_api.py728
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/rate_limits_api.py423
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/slack_api.py577
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/sns_api.py872
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/step_run_api.py2200
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/tenant_api.py4428
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/user_api.py2888
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/worker_api.py858
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/workflow_api.py6310
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/workflow_run_api.py1932
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/workflow_runs_api.py610
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api_client.py759
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api_response.py22
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/configuration.py611
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/exceptions.py200
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/__init__.py260
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/accept_invite_request.py83
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_error.py102
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_errors.py100
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta.py144
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta_auth.py85
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta_integration.py88
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta_posthog.py90
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_resource_meta.py98
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_token.py105
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/bulk_create_event_request.py100
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/bulk_create_event_response.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cancel_event_request.py85
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cancel_step_run_request.py83
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/concurrency_limit_strategy.py39
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_api_token_request.py92
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_api_token_response.py83
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_cron_workflow_trigger_request.py98
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_event_request.py95
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_pull_request_from_step_run.py83
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_sns_integration_request.py85
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_tenant_alert_email_group_request.py83
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_tenant_invite_request.py86
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_tenant_request.py84
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows.py131
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows_list.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows_method.py37
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows_order_by_field.py37
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event.py143
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_data.py83
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_key_list.py98
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_list.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_order_by_direction.py37
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_order_by_field.py36
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_update_cancel200_response.py85
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_workflow_run_summary.py116
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/events.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/get_step_run_diff_response.py100
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/github_app_installation.py107
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/github_branch.py86
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/github_repo.py86
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/info_get_version200_response.py83
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/job.py132
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/job_run.py176
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/job_run_status.py41
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/link_github_repository_request.py106
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_api_tokens_response.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_github_app_installations_response.py112
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_pull_requests_response.py100
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_slack_webhooks.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_sns_integrations.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line.py94
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_level.py39
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_list.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_order_by_direction.py37
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_order_by_field.py36
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/pagination_response.py95
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/pull_request.py112
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/pull_request_state.py37
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/queue_metrics.py97
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit.py117
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit_list.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit_order_by_direction.py37
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit_order_by_field.py38
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/recent_step_runs.py118
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/reject_invite_request.py83
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/replay_event_request.py85
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/replay_workflow_runs_request.py85
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/replay_workflow_runs_response.py100
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rerun_step_run_request.py83
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/schedule_workflow_run_request.py92
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_run_status.py42
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows.py149
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows_list.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows_method.py37
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py37
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/semaphore_slots.py113
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/slack_webhook.py127
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/sns_integration.py114
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step.py123
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run.py202
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_archive.py142
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_archive_list.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_diff.py91
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event.py120
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event_list.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event_reason.py52
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event_severity.py38
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_status.py44
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant.py118
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_alert_email_group.py98
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_alert_email_group_list.py112
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_alerting_settings.py143
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_invite.py120
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_invite_list.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_list.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_member.py123
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_member_list.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_member_role.py38
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_queue_metrics.py116
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_resource.py40
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_resource_limit.py135
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_resource_policy.py102
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py83
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/trigger_workflow_run_request.py91
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_tenant_alert_email_group_request.py83
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_tenant_invite_request.py85
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_tenant_request.py137
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_worker_request.py87
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user.py126
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_change_password_request.py88
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_login_request.py86
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_register_request.py91
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_tenant_memberships_list.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_tenant_public.py86
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker.py100
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_create_request.py94
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_create_response.py96
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_created.py102
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_list_response.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_request.py102
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_request_list_response.py102
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_request_method.py38
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker.py239
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_label.py102
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_list.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_runtime_info.py103
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_runtime_sdks.py38
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_type.py38
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow.py165
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_concurrency.py107
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_deployment_config.py136
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_kind.py38
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_list.py120
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_metrics.py97
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run.py188
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_cancel200_response.py85
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_list.py110
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_order_by_direction.py37
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_order_by_field.py39
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_shape.py186
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_status.py42
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_triggered_by.py112
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_runs_cancel_request.py85
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_runs_metrics.py94
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_runs_metrics_counts.py104
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_tag.py84
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_trigger_cron_ref.py86
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_trigger_event_ref.py86
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_triggers.py141
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_update_request.py85
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version.py170
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version_concurrency.py114
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version_definition.py85
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version_meta.py123
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_workers_count.py95
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/rest.py187
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/tenacity_utils.py39
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest_client.py611
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/run_event_listener.py260
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/clients/workflow_listener.py277
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/connection.py64
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/context/__init__.py1
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/context/context.py446
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/context/worker_context.py28
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/dispatcher_pb2.py102
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/dispatcher_pb2.pyi387
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/dispatcher_pb2_grpc.py621
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/events_pb2.py46
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/events_pb2.pyi87
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/events_pb2_grpc.py274
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/workflows_pb2.py80
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/workflows_pb2.pyi312
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/workflows_pb2_grpc.py277
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/features/cron.py286
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/features/scheduled.py248
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/hatchet.py310
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/labels.py10
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/loader.py244
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/logger.py13
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/metadata.py2
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/opentelemetry/instrumentor.py396
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/py.typed0
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/rate_limit.py126
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/semver.py30
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/token.py27
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/utils/aio_utils.py137
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/utils/backoff.py9
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/utils/serialization.py18
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/utils/types.py8
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/utils/typing.py12
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/v2/callable.py202
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/v2/concurrency.py47
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/v2/hatchet.py224
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/worker/__init__.py1
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/worker/action_listener_process.py278
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/run_loop_manager.py112
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/runner.py460
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/utils/capture_logs.py81
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/utils/error_with_traceback.py6
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/worker/worker.py392
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/workflow.py261
-rw-r--r--.venv/lib/python3.12/site-packages/hatchet_sdk/workflow_run.py59
233 files changed, 55633 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/__init__.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/__init__.py
new file mode 100644
index 00000000..3162c25c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/__init__.py
@@ -0,0 +1,247 @@
+from hatchet_sdk.clients.rest.models.accept_invite_request import AcceptInviteRequest
+
+# import models into sdk package
+from hatchet_sdk.clients.rest.models.api_error import APIError
+from hatchet_sdk.clients.rest.models.api_errors import APIErrors
+from hatchet_sdk.clients.rest.models.api_meta import APIMeta
+from hatchet_sdk.clients.rest.models.api_meta_auth import APIMetaAuth
+from hatchet_sdk.clients.rest.models.api_meta_integration import APIMetaIntegration
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.api_token import APIToken
+from hatchet_sdk.clients.rest.models.create_api_token_request import (
+ CreateAPITokenRequest,
+)
+from hatchet_sdk.clients.rest.models.create_api_token_response import (
+ CreateAPITokenResponse,
+)
+from hatchet_sdk.clients.rest.models.create_pull_request_from_step_run import (
+ CreatePullRequestFromStepRun,
+)
+from hatchet_sdk.clients.rest.models.create_tenant_invite_request import (
+ CreateTenantInviteRequest,
+)
+from hatchet_sdk.clients.rest.models.create_tenant_request import CreateTenantRequest
+from hatchet_sdk.clients.rest.models.event import Event
+from hatchet_sdk.clients.rest.models.event_data import EventData
+from hatchet_sdk.clients.rest.models.event_key_list import EventKeyList
+from hatchet_sdk.clients.rest.models.event_list import EventList
+from hatchet_sdk.clients.rest.models.event_order_by_direction import (
+ EventOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.event_order_by_field import EventOrderByField
+from hatchet_sdk.clients.rest.models.event_workflow_run_summary import (
+ EventWorkflowRunSummary,
+)
+from hatchet_sdk.clients.rest.models.get_step_run_diff_response import (
+ GetStepRunDiffResponse,
+)
+from hatchet_sdk.clients.rest.models.github_app_installation import (
+ GithubAppInstallation,
+)
+from hatchet_sdk.clients.rest.models.github_branch import GithubBranch
+from hatchet_sdk.clients.rest.models.github_repo import GithubRepo
+from hatchet_sdk.clients.rest.models.job import Job
+from hatchet_sdk.clients.rest.models.job_run import JobRun
+from hatchet_sdk.clients.rest.models.job_run_status import JobRunStatus
+from hatchet_sdk.clients.rest.models.link_github_repository_request import (
+ LinkGithubRepositoryRequest,
+)
+from hatchet_sdk.clients.rest.models.list_api_tokens_response import (
+ ListAPITokensResponse,
+)
+from hatchet_sdk.clients.rest.models.list_github_app_installations_response import (
+ ListGithubAppInstallationsResponse,
+)
+from hatchet_sdk.clients.rest.models.list_pull_requests_response import (
+ ListPullRequestsResponse,
+)
+from hatchet_sdk.clients.rest.models.log_line import LogLine
+from hatchet_sdk.clients.rest.models.log_line_level import LogLineLevel
+from hatchet_sdk.clients.rest.models.log_line_list import LogLineList
+from hatchet_sdk.clients.rest.models.log_line_order_by_direction import (
+ LogLineOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.log_line_order_by_field import LogLineOrderByField
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.pull_request import PullRequest
+from hatchet_sdk.clients.rest.models.pull_request_state import PullRequestState
+from hatchet_sdk.clients.rest.models.reject_invite_request import RejectInviteRequest
+from hatchet_sdk.clients.rest.models.replay_event_request import ReplayEventRequest
+from hatchet_sdk.clients.rest.models.rerun_step_run_request import RerunStepRunRequest
+from hatchet_sdk.clients.rest.models.step import Step
+from hatchet_sdk.clients.rest.models.step_run import StepRun
+from hatchet_sdk.clients.rest.models.step_run_diff import StepRunDiff
+from hatchet_sdk.clients.rest.models.step_run_status import StepRunStatus
+from hatchet_sdk.clients.rest.models.tenant import Tenant
+from hatchet_sdk.clients.rest.models.tenant_invite import TenantInvite
+from hatchet_sdk.clients.rest.models.tenant_invite_list import TenantInviteList
+from hatchet_sdk.clients.rest.models.tenant_list import TenantList
+from hatchet_sdk.clients.rest.models.tenant_member import TenantMember
+from hatchet_sdk.clients.rest.models.tenant_member_list import TenantMemberList
+from hatchet_sdk.clients.rest.models.tenant_member_role import TenantMemberRole
+from hatchet_sdk.clients.rest.models.trigger_workflow_run_request import (
+ TriggerWorkflowRunRequest,
+)
+from hatchet_sdk.clients.rest.models.update_tenant_invite_request import (
+ UpdateTenantInviteRequest,
+)
+from hatchet_sdk.clients.rest.models.user import User
+from hatchet_sdk.clients.rest.models.user_login_request import UserLoginRequest
+from hatchet_sdk.clients.rest.models.user_register_request import UserRegisterRequest
+from hatchet_sdk.clients.rest.models.user_tenant_memberships_list import (
+ UserTenantMembershipsList,
+)
+from hatchet_sdk.clients.rest.models.user_tenant_public import UserTenantPublic
+from hatchet_sdk.clients.rest.models.worker_list import WorkerList
+from hatchet_sdk.clients.rest.models.workflow import Workflow
+from hatchet_sdk.clients.rest.models.workflow_deployment_config import (
+ WorkflowDeploymentConfig,
+)
+from hatchet_sdk.clients.rest.models.workflow_list import WorkflowList
+from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun
+from hatchet_sdk.clients.rest.models.workflow_run_list import WorkflowRunList
+from hatchet_sdk.clients.rest.models.workflow_run_status import WorkflowRunStatus
+from hatchet_sdk.clients.rest.models.workflow_run_triggered_by import (
+ WorkflowRunTriggeredBy,
+)
+from hatchet_sdk.clients.rest.models.workflow_tag import WorkflowTag
+from hatchet_sdk.clients.rest.models.workflow_trigger_cron_ref import (
+ WorkflowTriggerCronRef,
+)
+from hatchet_sdk.clients.rest.models.workflow_trigger_event_ref import (
+ WorkflowTriggerEventRef,
+)
+from hatchet_sdk.clients.rest.models.workflow_triggers import WorkflowTriggers
+from hatchet_sdk.clients.rest.models.workflow_version import WorkflowVersion
+from hatchet_sdk.clients.rest.models.workflow_version_definition import (
+ WorkflowVersionDefinition,
+)
+from hatchet_sdk.clients.rest.models.workflow_version_meta import WorkflowVersionMeta
+from hatchet_sdk.contracts.workflows_pb2 import (
+ ConcurrencyLimitStrategy,
+ CreateWorkflowVersionOpts,
+ RateLimitDuration,
+ StickyStrategy,
+ WorkerLabelComparator,
+)
+from hatchet_sdk.utils.aio_utils import sync_to_async
+
+from .client import new_client
+from .clients.admin import (
+ ChildTriggerWorkflowOptions,
+ DedupeViolationErr,
+ ScheduleTriggerWorkflowOptions,
+ TriggerWorkflowOptions,
+)
+from .clients.events import PushEventOptions
+from .clients.run_event_listener import StepRunEventType, WorkflowRunEventType
+from .context.context import Context
+from .context.worker_context import WorkerContext
+from .hatchet import ClientConfig, Hatchet, concurrency, on_failure_step, step, workflow
+from .worker import Worker, WorkerStartOptions, WorkerStatus
+from .workflow import ConcurrencyExpression
+
+__all__ = [
+ "AcceptInviteRequest",
+ "APIError",
+ "APIErrors",
+ "APIMeta",
+ "APIMetaAuth",
+ "APIMetaIntegration",
+ "APIResourceMeta",
+ "APIToken",
+ "CreateAPITokenRequest",
+ "CreateAPITokenResponse",
+ "CreatePullRequestFromStepRun",
+ "CreateTenantInviteRequest",
+ "CreateTenantRequest",
+ "Event",
+ "EventData",
+ "EventKeyList",
+ "EventList",
+ "EventOrderByDirection",
+ "EventOrderByField",
+ "EventWorkflowRunSummary",
+ "GetStepRunDiffResponse",
+ "GithubAppInstallation",
+ "GithubBranch",
+ "GithubRepo",
+ "Job",
+ "JobRun",
+ "JobRunStatus",
+ "LinkGithubRepositoryRequest",
+ "ListAPITokensResponse",
+ "ListGithubAppInstallationsResponse",
+ "ListPullRequestsResponse",
+ "LogLine",
+ "LogLineLevel",
+ "LogLineList",
+ "LogLineOrderByDirection",
+ "LogLineOrderByField",
+ "PaginationResponse",
+ "PullRequest",
+ "PullRequestState",
+ "RejectInviteRequest",
+ "ReplayEventRequest",
+ "RerunStepRunRequest",
+ "Step",
+ "StepRun",
+ "StepRunDiff",
+ "StepRunStatus",
+ "sync_to_async",
+ "Tenant",
+ "TenantInvite",
+ "TenantInviteList",
+ "TenantList",
+ "TenantMember",
+ "TenantMemberList",
+ "TenantMemberRole",
+ "TriggerWorkflowRunRequest",
+ "UpdateTenantInviteRequest",
+ "User",
+ "UserLoginRequest",
+ "UserRegisterRequest",
+ "UserTenantMembershipsList",
+ "UserTenantPublic",
+ "Worker",
+ "WorkerLabelComparator",
+ "WorkerList",
+ "Workflow",
+ "WorkflowDeploymentConfig",
+ "WorkflowList",
+ "WorkflowRun",
+ "WorkflowRunList",
+ "WorkflowRunStatus",
+ "WorkflowRunTriggeredBy",
+ "WorkflowTag",
+ "WorkflowTriggerCronRef",
+ "WorkflowTriggerEventRef",
+ "WorkflowTriggers",
+ "WorkflowVersion",
+ "WorkflowVersionDefinition",
+ "WorkflowVersionMeta",
+ "ConcurrencyLimitStrategy",
+ "CreateWorkflowVersionOpts",
+ "RateLimitDuration",
+ "StickyStrategy",
+ "new_client",
+ "ChildTriggerWorkflowOptions",
+ "DedupeViolationErr",
+ "ScheduleTriggerWorkflowOptions",
+ "TriggerWorkflowOptions",
+ "PushEventOptions",
+ "StepRunEventType",
+ "WorkflowRunEventType",
+ "Context",
+ "WorkerContext",
+ "ClientConfig",
+ "Hatchet",
+ "concurrency",
+ "on_failure_step",
+ "step",
+ "workflow",
+ "Worker",
+ "WorkerStartOptions",
+ "WorkerStatus",
+ "ConcurrencyExpression",
+]
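
The package root above re-exports the generated REST models, the protobuf enums (ConcurrencyLimitStrategy, StickyStrategy, RateLimitDuration, WorkerLabelComparator), and the decorator-style API (Hatchet, step, workflow, concurrency, on_failure_step). A minimal sketch of how these top-level exports are typically combined follows; hatchet.py and worker.py are not shown in this excerpt, so the decorator keyword arguments (e.g. on_events), the worker name, and the workflow/step names are assumptions based on common Hatchet SDK usage rather than confirmed by this diff.

    from hatchet_sdk import Context, Hatchet

    hatchet = Hatchet(debug=True)

    # Assumed decorator usage: the on_events kwarg and class/step names are
    # illustrative only; hatchet.py is not part of this excerpt.
    @hatchet.workflow(on_events=["user:created"])
    class MyWorkflow:
        @hatchet.step()
        def step_one(self, context: Context):
            return {"status": "ok"}

    worker = hatchet.worker("example-worker")
    worker.register_workflow(MyWorkflow())
    worker.start()
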
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/client.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/client.py
new file mode 100644
index 00000000..45dfd394
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/client.py
@@ -0,0 +1,119 @@
+import asyncio
+from logging import Logger
+from typing import Callable
+
+import grpc
+
+from hatchet_sdk.clients.run_event_listener import RunEventListenerClient
+from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener
+from hatchet_sdk.connection import new_conn
+
+from .clients.admin import AdminClient, new_admin
+from .clients.dispatcher.dispatcher import DispatcherClient, new_dispatcher
+from .clients.events import EventClient, new_event
+from .clients.rest_client import RestApi
+from .loader import ClientConfig, ConfigLoader
+
+
+class Client:
+ admin: AdminClient
+ dispatcher: DispatcherClient
+ event: EventClient
+ rest: RestApi
+ workflow_listener: PooledWorkflowRunListener
+ logInterceptor: Logger
+ debug: bool = False
+
+ @classmethod
+ def from_environment(
+ cls,
+ defaults: ClientConfig = ClientConfig(),
+ debug: bool = False,
+ *opts_functions: Callable[[ClientConfig], None],
+ ):
+ try:
+ loop = asyncio.get_running_loop()
+ except RuntimeError:
+ loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(loop)
+
+ config: ClientConfig = ConfigLoader(".").load_client_config(defaults)
+ for opt_function in opts_functions:
+ opt_function(config)
+
+ return cls.from_config(config, debug)
+
+ @classmethod
+ def from_config(
+ cls,
+ config: ClientConfig = ClientConfig(),
+ debug: bool = False,
+ ):
+ try:
+ loop = asyncio.get_running_loop()
+ except RuntimeError:
+ loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(loop)
+
+ if config.tls_config is None:
+ raise ValueError("TLS config is required")
+
+ if config.host_port is None:
+ raise ValueError("Host and port are required")
+
+ conn: grpc.Channel = new_conn(config)
+
+ # Instantiate clients
+ event_client = new_event(conn, config)
+ admin_client = new_admin(config)
+ dispatcher_client = new_dispatcher(config)
+ rest_client = RestApi(config.server_url, config.token, config.tenant_id)
+ workflow_listener = None # Initialize this if needed
+
+ return cls(
+ event_client,
+ admin_client,
+ dispatcher_client,
+ workflow_listener,
+ rest_client,
+ config,
+ debug,
+ )
+
+ def __init__(
+ self,
+ event_client: EventClient,
+ admin_client: AdminClient,
+ dispatcher_client: DispatcherClient,
+ workflow_listener: PooledWorkflowRunListener,
+ rest_client: RestApi,
+ config: ClientConfig,
+ debug: bool = False,
+ ):
+ try:
+ loop = asyncio.get_running_loop()
+ except RuntimeError:
+ loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(loop)
+
+ self.admin = admin_client
+ self.dispatcher = dispatcher_client
+ self.event = event_client
+ self.rest = rest_client
+ self.config = config
+ self.listener = RunEventListenerClient(config)
+ self.workflow_listener = workflow_listener
+ self.logInterceptor = config.logInterceptor
+ self.debug = debug
+
+
+def with_host_port(host: str, port: int):
+ def with_host_port_impl(config: ClientConfig):
+ config.host = host
+ config.port = port
+
+ return with_host_port_impl
+
+
+new_client = Client.from_environment
+new_client_raw = Client.from_config
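
Client.from_environment (aliased to new_client above) loads a ClientConfig through ConfigLoader and then applies any opt functions, such as the with_host_port helper defined in this file. A small sketch of that wiring, assuming the loader's usual environment variables (API token, server address) are already set; the host and port values are illustrative.

    from hatchet_sdk.client import new_client, with_host_port
    from hatchet_sdk.loader import ClientConfig

    # Load configuration from the environment, then let the opt function
    # override host/port on the loaded ClientConfig.
    client = new_client(ClientConfig(), False, with_host_port("127.0.0.1", 7070))

    # The constructed Client exposes the sub-clients wired up in __init__:
    client.event       # EventClient
    client.admin       # AdminClient
    client.dispatcher  # DispatcherClient
    client.rest        # RestApi
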
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/admin.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/admin.py
new file mode 100644
index 00000000..18664cef
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/admin.py
@@ -0,0 +1,542 @@
+import json
+from datetime import datetime
+from typing import Any, Callable, Dict, List, Optional, TypedDict, TypeVar, Union
+
+import grpc
+from google.protobuf import timestamp_pb2
+
+from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun
+from hatchet_sdk.clients.rest.tenacity_utils import tenacity_retry
+from hatchet_sdk.clients.run_event_listener import new_listener
+from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener
+from hatchet_sdk.connection import new_conn
+from hatchet_sdk.contracts.workflows_pb2 import (
+ BulkTriggerWorkflowRequest,
+ BulkTriggerWorkflowResponse,
+ CreateWorkflowVersionOpts,
+ PutRateLimitRequest,
+ PutWorkflowRequest,
+ RateLimitDuration,
+ ScheduleWorkflowRequest,
+ TriggerWorkflowRequest,
+ TriggerWorkflowResponse,
+ WorkflowVersion,
+)
+from hatchet_sdk.contracts.workflows_pb2_grpc import WorkflowServiceStub
+from hatchet_sdk.utils.serialization import flatten
+from hatchet_sdk.workflow_run import RunRef, WorkflowRunRef
+
+from ..loader import ClientConfig
+from ..metadata import get_metadata
+from ..workflow import WorkflowMeta
+
+
+def new_admin(config: ClientConfig):
+ return AdminClient(config)
+
+
+class ScheduleTriggerWorkflowOptions(TypedDict, total=False):
+ parent_id: Optional[str]
+ parent_step_run_id: Optional[str]
+ child_index: Optional[int]
+ child_key: Optional[str]
+ namespace: Optional[str]
+
+
+class ChildTriggerWorkflowOptions(TypedDict, total=False):
+ additional_metadata: Dict[str, str] | None = None
+ sticky: bool | None = None
+
+
+class ChildWorkflowRunDict(TypedDict, total=False):
+ workflow_name: str
+ input: Any
+ options: ChildTriggerWorkflowOptions
+ key: str | None = None
+
+
+class TriggerWorkflowOptions(ScheduleTriggerWorkflowOptions, total=False):
+ additional_metadata: Dict[str, str] | None = None
+ desired_worker_id: str | None = None
+ namespace: str | None = None
+
+
+class WorkflowRunDict(TypedDict, total=False):
+ workflow_name: str
+ input: Any
+ options: TriggerWorkflowOptions | None
+
+
+class DedupeViolationErr(Exception):
+ """Raised by the Hatchet library to indicate that a workflow has already been run with this deduplication value."""
+
+ pass
+
+
+class AdminClientBase:
+ pooled_workflow_listener: PooledWorkflowRunListener | None = None
+
+ def _prepare_workflow_request(
+ self, workflow_name: str, input: any, options: TriggerWorkflowOptions = None
+ ):
+ try:
+ payload_data = json.dumps(input)
+
+ try:
+ meta = (
+ None
+ if options is None or "additional_metadata" not in options
+ else options["additional_metadata"]
+ )
+ if meta is not None:
+ options = {
+ **options,
+ "additional_metadata": json.dumps(meta).encode("utf-8"),
+ }
+ except json.JSONDecodeError as e:
+ raise ValueError(f"Error encoding payload: {e}")
+
+ return TriggerWorkflowRequest(
+ name=workflow_name, input=payload_data, **(options or {})
+ )
+ except json.JSONDecodeError as e:
+ raise ValueError(f"Error encoding payload: {e}")
+
+ def _prepare_put_workflow_request(
+ self,
+ name: str,
+ workflow: CreateWorkflowVersionOpts | WorkflowMeta,
+ overrides: CreateWorkflowVersionOpts | None = None,
+ ):
+ try:
+ opts: CreateWorkflowVersionOpts
+
+ if isinstance(workflow, CreateWorkflowVersionOpts):
+ opts = workflow
+ else:
+ opts = workflow.get_create_opts(self.client.config.namespace)
+
+ if overrides is not None:
+ opts.MergeFrom(overrides)
+
+ opts.name = name
+
+ return PutWorkflowRequest(
+ opts=opts,
+ )
+ except grpc.RpcError as e:
+ raise ValueError(f"Could not put workflow: {e}")
+
+ def _prepare_schedule_workflow_request(
+ self,
+ name: str,
+ schedules: List[Union[datetime, timestamp_pb2.Timestamp]],
+ input={},
+ options: ScheduleTriggerWorkflowOptions = None,
+ ):
+ timestamp_schedules = []
+ for schedule in schedules:
+ if isinstance(schedule, datetime):
+ t = schedule.timestamp()
+ seconds = int(t)
+ nanos = int(t % 1 * 1e9)
+ timestamp = timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos)
+ timestamp_schedules.append(timestamp)
+ elif isinstance(schedule, timestamp_pb2.Timestamp):
+ timestamp_schedules.append(schedule)
+ else:
+ raise ValueError(
+ "Invalid schedule type. Must be datetime or timestamp_pb2.Timestamp."
+ )
+
+ return ScheduleWorkflowRequest(
+ name=name,
+ schedules=timestamp_schedules,
+ input=json.dumps(input),
+ **(options or {}),
+ )
+
+
+T = TypeVar("T")
+
+
+class AdminClientAioImpl(AdminClientBase):
+ def __init__(self, config: ClientConfig):
+ aio_conn = new_conn(config, True)
+ self.config = config
+ self.aio_client = WorkflowServiceStub(aio_conn)
+ self.token = config.token
+ self.listener_client = new_listener(config)
+ self.namespace = config.namespace
+
+ async def run(
+ self,
+ function: Union[str, Callable[[Any], T]],
+ input: any,
+ options: TriggerWorkflowOptions = None,
+ ) -> "RunRef[T]":
+ workflow_name = function
+
+ if not isinstance(function, str):
+ workflow_name = function.function_name
+
+ wrr = await self.run_workflow(workflow_name, input, options)
+
+ return RunRef[T](
+ wrr.workflow_run_id, wrr.workflow_listener, wrr.workflow_run_event_listener
+ )
+
+ ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
+ @tenacity_retry
+ async def run_workflow(
+ self, workflow_name: str, input: any, options: TriggerWorkflowOptions = None
+ ) -> WorkflowRunRef:
+ try:
+ if not self.pooled_workflow_listener:
+ self.pooled_workflow_listener = PooledWorkflowRunListener(self.config)
+
+ namespace = self.namespace
+
+ if (
+ options is not None
+ and "namespace" in options
+ and options["namespace"] is not None
+ ):
+ namespace = options.pop("namespace")
+
+ if namespace != "" and not workflow_name.startswith(self.namespace):
+ workflow_name = f"{namespace}{workflow_name}"
+
+ request = self._prepare_workflow_request(workflow_name, input, options)
+
+ resp: TriggerWorkflowResponse = await self.aio_client.TriggerWorkflow(
+ request,
+ metadata=get_metadata(self.token),
+ )
+
+ return WorkflowRunRef(
+ workflow_run_id=resp.workflow_run_id,
+ workflow_listener=self.pooled_workflow_listener,
+ workflow_run_event_listener=self.listener_client,
+ )
+ except (grpc.RpcError, grpc.aio.AioRpcError) as e:
+ if e.code() == grpc.StatusCode.ALREADY_EXISTS:
+ raise DedupeViolationErr(e.details())
+
+ raise e
+
+ ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
+ @tenacity_retry
+ async def run_workflows(
+ self,
+ workflows: list[WorkflowRunDict],
+ options: TriggerWorkflowOptions | None = None,
+ ) -> List[WorkflowRunRef]:
+ if len(workflows) == 0:
+ raise ValueError("No workflows to run")
+
+ if not self.pooled_workflow_listener:
+ self.pooled_workflow_listener = PooledWorkflowRunListener(self.config)
+
+ namespace = self.namespace
+
+ if (
+ options is not None
+ and "namespace" in options
+ and options["namespace"] is not None
+ ):
+ namespace = options["namespace"]
+ del options["namespace"]
+
+ workflow_run_requests: TriggerWorkflowRequest = []
+
+ for workflow in workflows:
+ workflow_name = workflow["workflow_name"]
+ input_data = workflow["input"]
+ options = workflow["options"]
+
+ if namespace != "" and not workflow_name.startswith(self.namespace):
+ workflow_name = f"{namespace}{workflow_name}"
+
+ # Prepare and trigger workflow for each workflow name and input
+ request = self._prepare_workflow_request(workflow_name, input_data, options)
+ workflow_run_requests.append(request)
+
+ request = BulkTriggerWorkflowRequest(workflows=workflow_run_requests)
+
+ resp: BulkTriggerWorkflowResponse = await self.aio_client.BulkTriggerWorkflow(
+ request,
+ metadata=get_metadata(self.token),
+ )
+
+ return [
+ WorkflowRunRef(
+ workflow_run_id=workflow_run_id,
+ workflow_listener=self.pooled_workflow_listener,
+ workflow_run_event_listener=self.listener_client,
+ )
+ for workflow_run_id in resp.workflow_run_ids
+ ]
+
+ @tenacity_retry
+ async def put_workflow(
+ self,
+ name: str,
+ workflow: CreateWorkflowVersionOpts | WorkflowMeta,
+ overrides: CreateWorkflowVersionOpts | None = None,
+ ) -> WorkflowVersion:
+ opts = self._prepare_put_workflow_request(name, workflow, overrides)
+
+ return await self.aio_client.PutWorkflow(
+ opts,
+ metadata=get_metadata(self.token),
+ )
+
+ @tenacity_retry
+ async def put_rate_limit(
+ self,
+ key: str,
+ limit: int,
+ duration: RateLimitDuration = RateLimitDuration.SECOND,
+ ):
+ await self.aio_client.PutRateLimit(
+ PutRateLimitRequest(
+ key=key,
+ limit=limit,
+ duration=duration,
+ ),
+ metadata=get_metadata(self.token),
+ )
+
+ @tenacity_retry
+ async def schedule_workflow(
+ self,
+ name: str,
+ schedules: List[Union[datetime, timestamp_pb2.Timestamp]],
+ input={},
+ options: ScheduleTriggerWorkflowOptions = None,
+ ) -> WorkflowVersion:
+ try:
+ namespace = self.namespace
+
+ if (
+ options is not None
+ and "namespace" in options
+ and options["namespace"] is not None
+ ):
+ namespace = options["namespace"]
+ del options["namespace"]
+
+ if namespace != "" and not name.startswith(self.namespace):
+ name = f"{namespace}{name}"
+
+ request = self._prepare_schedule_workflow_request(
+ name, schedules, input, options
+ )
+
+ return await self.aio_client.ScheduleWorkflow(
+ request,
+ metadata=get_metadata(self.token),
+ )
+ except (grpc.aio.AioRpcError, grpc.RpcError) as e:
+ if e.code() == grpc.StatusCode.ALREADY_EXISTS:
+ raise DedupeViolationErr(e.details())
+
+ raise e
+
+
+class AdminClient(AdminClientBase):
+ def __init__(self, config: ClientConfig):
+ conn = new_conn(config)
+ self.config = config
+ self.client = WorkflowServiceStub(conn)
+ self.aio = AdminClientAioImpl(config)
+ self.token = config.token
+ self.listener_client = new_listener(config)
+ self.namespace = config.namespace
+
+ @tenacity_retry
+ def put_workflow(
+ self,
+ name: str,
+ workflow: CreateWorkflowVersionOpts | WorkflowMeta,
+ overrides: CreateWorkflowVersionOpts | None = None,
+ ) -> WorkflowVersion:
+ opts = self._prepare_put_workflow_request(name, workflow, overrides)
+
+ resp: WorkflowVersion = self.client.PutWorkflow(
+ opts,
+ metadata=get_metadata(self.token),
+ )
+
+ return resp
+
+ @tenacity_retry
+ def put_rate_limit(
+ self,
+ key: str,
+ limit: int,
+ duration: Union[RateLimitDuration.Value, str] = RateLimitDuration.SECOND,
+ ):
+ self.client.PutRateLimit(
+ PutRateLimitRequest(
+ key=key,
+ limit=limit,
+ duration=duration,
+ ),
+ metadata=get_metadata(self.token),
+ )
+
+ @tenacity_retry
+ def schedule_workflow(
+ self,
+ name: str,
+ schedules: List[Union[datetime, timestamp_pb2.Timestamp]],
+ input={},
+ options: ScheduleTriggerWorkflowOptions = None,
+ ) -> WorkflowVersion:
+ try:
+ namespace = self.namespace
+
+ if (
+ options is not None
+ and "namespace" in options
+ and options["namespace"] is not None
+ ):
+ namespace = options["namespace"]
+ del options["namespace"]
+
+ if namespace != "" and not name.startswith(self.namespace):
+ name = f"{namespace}{name}"
+
+ request = self._prepare_schedule_workflow_request(
+ name, schedules, input, options
+ )
+
+ return self.client.ScheduleWorkflow(
+ request,
+ metadata=get_metadata(self.token),
+ )
+ except (grpc.RpcError, grpc.aio.AioRpcError) as e:
+ if e.code() == grpc.StatusCode.ALREADY_EXISTS:
+ raise DedupeViolationErr(e.details())
+
+ raise e
+
+ ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
+ @tenacity_retry
+ def run_workflow(
+ self, workflow_name: str, input: any, options: TriggerWorkflowOptions = None
+ ) -> WorkflowRunRef:
+ try:
+ if not self.pooled_workflow_listener:
+ self.pooled_workflow_listener = PooledWorkflowRunListener(self.config)
+
+ namespace = self.namespace
+
+ ## TODO: Factor this out - it's repeated a lot of places
+ if (
+ options is not None
+ and "namespace" in options
+ and options["namespace"] is not None
+ ):
+ namespace = options.pop("namespace")
+
+ if namespace != "" and not workflow_name.startswith(self.namespace):
+ workflow_name = f"{namespace}{workflow_name}"
+
+ request = self._prepare_workflow_request(workflow_name, input, options)
+
+ resp: TriggerWorkflowResponse = self.client.TriggerWorkflow(
+ request,
+ metadata=get_metadata(self.token),
+ )
+
+ return WorkflowRunRef(
+ workflow_run_id=resp.workflow_run_id,
+ workflow_listener=self.pooled_workflow_listener,
+ workflow_run_event_listener=self.listener_client,
+ )
+ except (grpc.RpcError, grpc.aio.AioRpcError) as e:
+ if e.code() == grpc.StatusCode.ALREADY_EXISTS:
+ raise DedupeViolationErr(e.details())
+
+ raise e
+
+ ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
+ @tenacity_retry
+ def run_workflows(
+ self, workflows: List[WorkflowRunDict], options: TriggerWorkflowOptions = None
+ ) -> list[WorkflowRunRef]:
+ workflow_run_requests: TriggerWorkflowRequest = []
+ if not self.pooled_workflow_listener:
+ self.pooled_workflow_listener = PooledWorkflowRunListener(self.config)
+
+ for workflow in workflows:
+ workflow_name = workflow["workflow_name"]
+ input_data = workflow["input"]
+ options = workflow["options"]
+
+ namespace = self.namespace
+
+ if (
+ options is not None
+ and "namespace" in options
+ and options["namespace"] is not None
+ ):
+ namespace = options["namespace"]
+ del options["namespace"]
+
+ if namespace != "" and not workflow_name.startswith(self.namespace):
+ workflow_name = f"{namespace}{workflow_name}"
+
+ # Prepare and trigger workflow for each workflow name and input
+ request = self._prepare_workflow_request(workflow_name, input_data, options)
+
+ workflow_run_requests.append(request)
+
+ request = BulkTriggerWorkflowRequest(workflows=workflow_run_requests)
+
+ resp: BulkTriggerWorkflowResponse = self.client.BulkTriggerWorkflow(
+ request,
+ metadata=get_metadata(self.token),
+ )
+
+ return [
+ WorkflowRunRef(
+ workflow_run_id=workflow_run_id,
+ workflow_listener=self.pooled_workflow_listener,
+ workflow_run_event_listener=self.listener_client,
+ )
+ for workflow_run_id in resp.workflow_run_ids
+ ]
+
+ def run(
+ self,
+ function: Union[str, Callable[[Any], T]],
+ input: any,
+ options: TriggerWorkflowOptions = None,
+ ) -> "RunRef[T]":
+ workflow_name = function
+
+ if not isinstance(function, str):
+ workflow_name = function.function_name
+
+ wrr = self.run_workflow(workflow_name, input, options)
+
+ return RunRef[T](
+ wrr.workflow_run_id, wrr.workflow_listener, wrr.workflow_run_event_listener
+ )
+
+ def get_workflow_run(self, workflow_run_id: str) -> WorkflowRunRef:
+ try:
+ if not self.pooled_workflow_listener:
+ self.pooled_workflow_listener = PooledWorkflowRunListener(self.config)
+
+ return WorkflowRunRef(
+ workflow_run_id=workflow_run_id,
+ workflow_listener=self.pooled_workflow_listener,
+ workflow_run_event_listener=self.listener_client,
+ )
+ except grpc.RpcError as e:
+ raise ValueError(f"Could not get workflow run: {e}")
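
Taken together, AdminClient.run_workflow, schedule_workflow, and the DedupeViolationErr raised on an ALREADY_EXISTS status make up the trigger-side surface of the client. A hedged sketch using only the signatures shown above; the workflow name, input payload, and metadata values are illustrative, and credentials are assumed to be configured in the environment.

    from datetime import datetime, timedelta

    from hatchet_sdk import DedupeViolationErr, TriggerWorkflowOptions, new_client

    client = new_client()
    admin = client.admin

    try:
        # Trigger a run; the returned WorkflowRunRef wraps the run id plus the
        # pooled workflow listener and the run-event listener client.
        ref = admin.run_workflow(
            "example-workflow",
            {"user_id": "1234"},
            options=TriggerWorkflowOptions(additional_metadata={"source": "example"}),
        )
    except DedupeViolationErr:
        # Raised when the server returns ALREADY_EXISTS for a duplicate
        # deduplication value (see run_workflow above).
        pass

    # Schedule the same workflow to run once, five minutes from now.
    admin.schedule_workflow(
        "example-workflow", [datetime.now() + timedelta(minutes=5)]
    )
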
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/dispatcher/action_listener.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/dispatcher/action_listener.py
new file mode 100644
index 00000000..cf231a76
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/dispatcher/action_listener.py
@@ -0,0 +1,423 @@
+import asyncio
+import json
+import time
+from dataclasses import dataclass, field
+from typing import Any, AsyncGenerator, List, Optional
+
+import grpc
+from grpc._cython import cygrpc
+
+from hatchet_sdk.clients.event_ts import Event_ts, read_with_interrupt
+from hatchet_sdk.clients.run_event_listener import (
+ DEFAULT_ACTION_LISTENER_RETRY_INTERVAL,
+)
+from hatchet_sdk.connection import new_conn
+from hatchet_sdk.contracts.dispatcher_pb2 import (
+ ActionType,
+ AssignedAction,
+ HeartbeatRequest,
+ WorkerLabels,
+ WorkerListenRequest,
+ WorkerUnsubscribeRequest,
+)
+from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub
+from hatchet_sdk.logger import logger
+from hatchet_sdk.utils.backoff import exp_backoff_sleep
+from hatchet_sdk.utils.serialization import flatten
+
+from ...loader import ClientConfig
+from ...metadata import get_metadata
+from ..events import proto_timestamp_now
+
+DEFAULT_ACTION_TIMEOUT = 600 # seconds
+
+
+DEFAULT_ACTION_LISTENER_RETRY_INTERVAL = 5 # seconds
+DEFAULT_ACTION_LISTENER_RETRY_COUNT = 15
+
+
+@dataclass
+class GetActionListenerRequest:
+ worker_name: str
+ services: List[str]
+ actions: List[str]
+ max_runs: Optional[int] = None
+ _labels: dict[str, str | int] = field(default_factory=dict)
+
+ labels: dict[str, WorkerLabels] = field(init=False)
+
+ def __post_init__(self):
+ self.labels = {}
+
+ for key, value in self._labels.items():
+ if isinstance(value, int):
+ self.labels[key] = WorkerLabels(intValue=value)
+ else:
+ self.labels[key] = WorkerLabels(strValue=str(value))
+
+
+@dataclass
+class Action:
+ worker_id: str
+ tenant_id: str
+ workflow_run_id: str
+ get_group_key_run_id: str
+ job_id: str
+ job_name: str
+ job_run_id: str
+ step_id: str
+ step_run_id: str
+ action_id: str
+ action_payload: str
+ action_type: ActionType
+ retry_count: int
+ additional_metadata: dict[str, str] | None = None
+
+ child_workflow_index: int | None = None
+ child_workflow_key: str | None = None
+ parent_workflow_run_id: str | None = None
+
+ def __post_init__(self):
+ if isinstance(self.additional_metadata, str) and self.additional_metadata != "":
+ try:
+ self.additional_metadata = json.loads(self.additional_metadata)
+ except json.JSONDecodeError:
+ # If JSON decoding fails, keep the original string
+ pass
+
+ # Ensure additional_metadata is always a dictionary
+ if not isinstance(self.additional_metadata, dict):
+ self.additional_metadata = {}
+
+ @property
+ def otel_attributes(self) -> dict[str, str | int]:
+ try:
+ payload_str = json.dumps(self.action_payload, default=str)
+ except Exception:
+ payload_str = str(self.action_payload)
+
+ attrs: dict[str, str | int | None] = {
+ "hatchet.tenant_id": self.tenant_id,
+ "hatchet.worker_id": self.worker_id,
+ "hatchet.workflow_run_id": self.workflow_run_id,
+ "hatchet.step_id": self.step_id,
+ "hatchet.step_run_id": self.step_run_id,
+ "hatchet.retry_count": self.retry_count,
+ "hatchet.parent_workflow_run_id": self.parent_workflow_run_id,
+ "hatchet.child_workflow_index": self.child_workflow_index,
+ "hatchet.child_workflow_key": self.child_workflow_key,
+ "hatchet.action_payload": payload_str,
+ "hatchet.workflow_name": self.job_name,
+ "hatchet.action_name": self.action_id,
+ "hatchet.get_group_key_run_id": self.get_group_key_run_id,
+ }
+
+ return {k: v for k, v in attrs.items() if v}
+
+
+START_STEP_RUN = 0
+CANCEL_STEP_RUN = 1
+START_GET_GROUP_KEY = 2
+
+
+@dataclass
+class ActionListener:
+ config: ClientConfig
+ worker_id: str
+
+ client: DispatcherStub = field(init=False)
+ aio_client: DispatcherStub = field(init=False)
+ token: str = field(init=False)
+ retries: int = field(default=0, init=False)
+ last_connection_attempt: float = field(default=0, init=False)
+ last_heartbeat_succeeded: bool = field(default=True, init=False)
+ time_last_hb_succeeded: float = field(default=9999999999999, init=False)
+ heartbeat_task: Optional[asyncio.Task] = field(default=None, init=False)
+ run_heartbeat: bool = field(default=True, init=False)
+ listen_strategy: str = field(default="v2", init=False)
+ stop_signal: bool = field(default=False, init=False)
+
+ missed_heartbeats: int = field(default=0, init=False)
+
+ def __post_init__(self):
+ self.client = DispatcherStub(new_conn(self.config))
+ self.aio_client = DispatcherStub(new_conn(self.config, True))
+ self.token = self.config.token
+
+ def is_healthy(self):
+ return self.last_heartbeat_succeeded
+
+ async def heartbeat(self):
+ # send a heartbeat every 4 seconds
+ heartbeat_delay = 4
+
+ while True:
+ if not self.run_heartbeat:
+ break
+
+ try:
+ logger.debug("sending heartbeat")
+ await self.aio_client.Heartbeat(
+ HeartbeatRequest(
+ workerId=self.worker_id,
+ heartbeatAt=proto_timestamp_now(),
+ ),
+ timeout=5,
+ metadata=get_metadata(self.token),
+ )
+
+ if self.last_heartbeat_succeeded is False:
+ logger.info("listener established")
+
+ now = time.time()
+ diff = now - self.time_last_hb_succeeded
+ if diff > heartbeat_delay + 1:
+ logger.warn(
+ f"time since last successful heartbeat: {diff:.2f}s, expects {heartbeat_delay}s"
+ )
+
+ self.last_heartbeat_succeeded = True
+ self.time_last_hb_succeeded = now
+ self.missed_heartbeats = 0
+ except grpc.RpcError as e:
+ self.missed_heartbeats = self.missed_heartbeats + 1
+ self.last_heartbeat_succeeded = False
+
+ if (
+ e.code() == grpc.StatusCode.UNAVAILABLE
+ or e.code() == grpc.StatusCode.FAILED_PRECONDITION
+ ):
+ # todo case on "recvmsg:Connection reset by peer" for updates?
+ if self.missed_heartbeats >= 3:
+ # we don't reraise the error here, as we don't want to stop the heartbeat thread
+ logger.error(
+ f"⛔️ failed heartbeat ({self.missed_heartbeats}): {e.details()}"
+ )
+ elif self.missed_heartbeats > 1:
+ logger.warning(
+ f"failed to send heartbeat ({self.missed_heartbeats}): {e.details()}"
+ )
+ else:
+ logger.error(f"failed to send heartbeat: {e}")
+
+ if self.interrupt is not None:
+ self.interrupt.set()
+
+ if e.code() == grpc.StatusCode.UNIMPLEMENTED:
+ break
+ await asyncio.sleep(heartbeat_delay)
+
+ async def start_heartbeater(self):
+ if self.heartbeat_task is not None:
+ return
+
+ try:
+ loop = asyncio.get_event_loop()
+ except RuntimeError as e:
+ if str(e).startswith("There is no current event loop in thread"):
+ loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(loop)
+ else:
+ raise e
+ self.heartbeat_task = loop.create_task(self.heartbeat())
+
+ def __aiter__(self):
+ return self._generator()
+
+ async def _generator(self) -> AsyncGenerator[Action, None]:
+ listener = None
+
+ while not self.stop_signal:
+ if listener is not None:
+ listener.cancel()
+
+ try:
+ listener = await self.get_listen_client()
+ except Exception:
+ logger.info("closing action listener loop")
+ yield None
+
+ try:
+ while not self.stop_signal:
+ self.interrupt = Event_ts()
+ t = asyncio.create_task(
+ read_with_interrupt(listener, self.interrupt)
+ )
+ await self.interrupt.wait()
+
+ if not t.done():
+                        # the interrupt fired before the read completed; warn, tear down, and reconnect
+ logger.warning(
+ "Interrupted read_with_interrupt task of action listener"
+ )
+
+ t.cancel()
+ listener.cancel()
+ break
+
+ assigned_action = t.result()
+
+ if assigned_action is cygrpc.EOF:
+ self.retries = self.retries + 1
+ break
+
+ self.retries = 0
+ assigned_action: AssignedAction
+
+ # Process the received action
+ action_type = self.map_action_type(assigned_action.actionType)
+
+ if (
+ assigned_action.actionPayload is None
+ or assigned_action.actionPayload == ""
+ ):
+ action_payload = None
+ else:
+ action_payload = self.parse_action_payload(
+ assigned_action.actionPayload
+ )
+
+ action = Action(
+ tenant_id=assigned_action.tenantId,
+ worker_id=self.worker_id,
+ workflow_run_id=assigned_action.workflowRunId,
+ get_group_key_run_id=assigned_action.getGroupKeyRunId,
+ job_id=assigned_action.jobId,
+ job_name=assigned_action.jobName,
+ job_run_id=assigned_action.jobRunId,
+ step_id=assigned_action.stepId,
+ step_run_id=assigned_action.stepRunId,
+ action_id=assigned_action.actionId,
+ action_payload=action_payload,
+ action_type=action_type,
+ retry_count=assigned_action.retryCount,
+ additional_metadata=assigned_action.additional_metadata,
+ child_workflow_index=assigned_action.child_workflow_index,
+ child_workflow_key=assigned_action.child_workflow_key,
+ parent_workflow_run_id=assigned_action.parent_workflow_run_id,
+ )
+
+ yield action
+ except grpc.RpcError as e:
+ self.last_heartbeat_succeeded = False
+
+ # Handle different types of errors
+ if e.code() == grpc.StatusCode.CANCELLED:
+ # Context cancelled, unsubscribe and close
+ logger.debug("Context cancelled, closing listener")
+ elif e.code() == grpc.StatusCode.DEADLINE_EXCEEDED:
+ logger.info("Deadline exceeded, retrying subscription")
+ elif (
+ self.listen_strategy == "v2"
+ and e.code() == grpc.StatusCode.UNIMPLEMENTED
+ ):
+ # ListenV2 is not available, fallback to Listen
+ self.listen_strategy = "v1"
+ self.run_heartbeat = False
+ logger.info("ListenV2 not available, falling back to Listen")
+ else:
+ # TODO retry
+ if e.code() == grpc.StatusCode.UNAVAILABLE:
+ logger.error(f"action listener error: {e.details()}")
+ else:
+ # Unknown error, report and break
+ logger.error(f"action listener error: {e}")
+
+ self.retries = self.retries + 1
+
+ def parse_action_payload(self, payload: str):
+ try:
+ payload_data = json.loads(payload)
+ except json.JSONDecodeError as e:
+ raise ValueError(f"Error decoding payload: {e}")
+ return payload_data
+
+ def map_action_type(self, action_type):
+ if action_type == ActionType.START_STEP_RUN:
+ return START_STEP_RUN
+ elif action_type == ActionType.CANCEL_STEP_RUN:
+ return CANCEL_STEP_RUN
+ elif action_type == ActionType.START_GET_GROUP_KEY:
+ return START_GET_GROUP_KEY
+ else:
+ # logger.error(f"Unknown action type: {action_type}")
+ return None
+
+ async def get_listen_client(self):
+ current_time = int(time.time())
+
+ if (
+ current_time - self.last_connection_attempt
+ > DEFAULT_ACTION_LISTENER_RETRY_INTERVAL
+ ):
+ # reset retries if last connection was long lived
+ self.retries = 0
+
+ if self.retries > DEFAULT_ACTION_LISTENER_RETRY_COUNT:
+ # TODO this is the problem case...
+ logger.error(
+ f"could not establish action listener connection after {DEFAULT_ACTION_LISTENER_RETRY_COUNT} retries"
+ )
+ self.run_heartbeat = False
+ raise Exception("retry_exhausted")
+        elif self.retries >= 1:
+            # if we are retrying, back off before reconnecting (exponential backoff based on the retry interval)
+ await exp_backoff_sleep(
+ self.retries, DEFAULT_ACTION_LISTENER_RETRY_INTERVAL
+ )
+
+ logger.info(
+ f"action listener connection interrupted, retrying... ({self.retries}/{DEFAULT_ACTION_LISTENER_RETRY_COUNT})"
+ )
+
+ self.aio_client = DispatcherStub(new_conn(self.config, True))
+
+ if self.listen_strategy == "v2":
+ # we should await for the listener to be established before
+ # starting the heartbeater
+ listener = self.aio_client.ListenV2(
+ WorkerListenRequest(workerId=self.worker_id),
+ timeout=self.config.listener_v2_timeout,
+ metadata=get_metadata(self.token),
+ )
+ await self.start_heartbeater()
+ else:
+ # if ListenV2 is not available, fallback to Listen
+ listener = self.aio_client.Listen(
+ WorkerListenRequest(workerId=self.worker_id),
+ timeout=DEFAULT_ACTION_TIMEOUT,
+ metadata=get_metadata(self.token),
+ )
+
+ self.last_connection_attempt = current_time
+
+ return listener
+
+    def cleanup(self):
+        self.run_heartbeat = False
+        if self.heartbeat_task is not None:
+            self.heartbeat_task.cancel()
+
+ try:
+ self.unregister()
+ except Exception as e:
+ logger.error(f"failed to unregister: {e}")
+
+ if self.interrupt:
+ self.interrupt.set()
+
+    def unregister(self):
+        self.run_heartbeat = False
+        if self.heartbeat_task is not None:
+            self.heartbeat_task.cancel()
+
+ try:
+ req = self.aio_client.Unsubscribe(
+ WorkerUnsubscribeRequest(workerId=self.worker_id),
+ timeout=5,
+ metadata=get_metadata(self.token),
+ )
+ if self.interrupt is not None:
+ self.interrupt.set()
+ return req
+ except grpc.RpcError as e:
+ raise Exception(f"Failed to unsubscribe: {e}")
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/dispatcher/dispatcher.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/dispatcher/dispatcher.py
new file mode 100644
index 00000000..407a80cc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/dispatcher/dispatcher.py
@@ -0,0 +1,204 @@
+from typing import Any, cast
+
+from google.protobuf.timestamp_pb2 import Timestamp
+
+from hatchet_sdk.clients.dispatcher.action_listener import (
+ Action,
+ ActionListener,
+ GetActionListenerRequest,
+)
+from hatchet_sdk.clients.rest.tenacity_utils import tenacity_retry
+from hatchet_sdk.connection import new_conn
+from hatchet_sdk.contracts.dispatcher_pb2 import (
+ STEP_EVENT_TYPE_COMPLETED,
+ STEP_EVENT_TYPE_FAILED,
+ ActionEventResponse,
+ GroupKeyActionEvent,
+ GroupKeyActionEventType,
+ OverridesData,
+ RefreshTimeoutRequest,
+ ReleaseSlotRequest,
+ StepActionEvent,
+ StepActionEventType,
+ UpsertWorkerLabelsRequest,
+ WorkerLabels,
+ WorkerRegisterRequest,
+ WorkerRegisterResponse,
+)
+from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub
+
+from ...loader import ClientConfig
+from ...metadata import get_metadata
+
+DEFAULT_REGISTER_TIMEOUT = 30
+
+
+def new_dispatcher(config: ClientConfig) -> "DispatcherClient":
+ return DispatcherClient(config=config)
+
+
+class DispatcherClient:
+ config: ClientConfig
+
+ def __init__(self, config: ClientConfig):
+ conn = new_conn(config)
+ self.client = DispatcherStub(conn) # type: ignore[no-untyped-call]
+
+ aio_conn = new_conn(config, True)
+ self.aio_client = DispatcherStub(aio_conn) # type: ignore[no-untyped-call]
+ self.token = config.token
+ self.config = config
+
+ async def get_action_listener(
+ self, req: GetActionListenerRequest
+ ) -> ActionListener:
+
+ # Override labels with the preset labels
+ preset_labels = self.config.worker_preset_labels
+
+ for key, value in preset_labels.items():
+ req.labels[key] = WorkerLabels(strValue=str(value))
+
+ # Register the worker
+ response: WorkerRegisterResponse = await self.aio_client.Register(
+ WorkerRegisterRequest(
+ workerName=req.worker_name,
+ actions=req.actions,
+ services=req.services,
+ maxRuns=req.max_runs,
+ labels=req.labels,
+ ),
+ timeout=DEFAULT_REGISTER_TIMEOUT,
+ metadata=get_metadata(self.token),
+ )
+
+ return ActionListener(self.config, response.workerId)
+
+ async def send_step_action_event(
+ self, action: Action, event_type: StepActionEventType, payload: str
+ ) -> Any:
+ try:
+ return await self._try_send_step_action_event(action, event_type, payload)
+ except Exception as e:
+ # for step action events, send a failure event when we cannot send the completed event
+ if (
+ event_type == STEP_EVENT_TYPE_COMPLETED
+ or event_type == STEP_EVENT_TYPE_FAILED
+ ):
+ await self._try_send_step_action_event(
+ action,
+ STEP_EVENT_TYPE_FAILED,
+ "Failed to send finished event: " + str(e),
+ )
+
+ return
+
+ @tenacity_retry
+ async def _try_send_step_action_event(
+ self, action: Action, event_type: StepActionEventType, payload: str
+ ) -> Any:
+ eventTimestamp = Timestamp()
+ eventTimestamp.GetCurrentTime()
+
+ event = StepActionEvent(
+ workerId=action.worker_id,
+ jobId=action.job_id,
+ jobRunId=action.job_run_id,
+ stepId=action.step_id,
+ stepRunId=action.step_run_id,
+ actionId=action.action_id,
+ eventTimestamp=eventTimestamp,
+ eventType=event_type,
+ eventPayload=payload,
+ retryCount=action.retry_count,
+ )
+
+ ## TODO: What does this return?
+ return await self.aio_client.SendStepActionEvent(
+ event,
+ metadata=get_metadata(self.token),
+ )
+
+ async def send_group_key_action_event(
+ self, action: Action, event_type: GroupKeyActionEventType, payload: str
+ ) -> Any:
+ eventTimestamp = Timestamp()
+ eventTimestamp.GetCurrentTime()
+
+ event = GroupKeyActionEvent(
+ workerId=action.worker_id,
+ workflowRunId=action.workflow_run_id,
+ getGroupKeyRunId=action.get_group_key_run_id,
+ actionId=action.action_id,
+ eventTimestamp=eventTimestamp,
+ eventType=event_type,
+ eventPayload=payload,
+ )
+
+ ## TODO: What does this return?
+ return await self.aio_client.SendGroupKeyActionEvent(
+ event,
+ metadata=get_metadata(self.token),
+ )
+
+ def put_overrides_data(self, data: OverridesData) -> ActionEventResponse:
+ return cast(
+ ActionEventResponse,
+ self.client.PutOverridesData(
+ data,
+ metadata=get_metadata(self.token),
+ ),
+ )
+
+ def release_slot(self, step_run_id: str) -> None:
+ self.client.ReleaseSlot(
+ ReleaseSlotRequest(stepRunId=step_run_id),
+ timeout=DEFAULT_REGISTER_TIMEOUT,
+ metadata=get_metadata(self.token),
+ )
+
+ def refresh_timeout(self, step_run_id: str, increment_by: str) -> None:
+ self.client.RefreshTimeout(
+ RefreshTimeoutRequest(
+ stepRunId=step_run_id,
+ incrementTimeoutBy=increment_by,
+ ),
+ timeout=DEFAULT_REGISTER_TIMEOUT,
+ metadata=get_metadata(self.token),
+ )
+
+ def upsert_worker_labels(
+ self, worker_id: str | None, labels: dict[str, str | int]
+ ) -> None:
+ worker_labels = {}
+
+ for key, value in labels.items():
+ if isinstance(value, int):
+ worker_labels[key] = WorkerLabels(intValue=value)
+ else:
+ worker_labels[key] = WorkerLabels(strValue=str(value))
+
+ self.client.UpsertWorkerLabels(
+ UpsertWorkerLabelsRequest(workerId=worker_id, labels=worker_labels),
+ timeout=DEFAULT_REGISTER_TIMEOUT,
+ metadata=get_metadata(self.token),
+ )
+
+ async def async_upsert_worker_labels(
+ self,
+ worker_id: str | None,
+ labels: dict[str, str | int],
+ ) -> None:
+ worker_labels = {}
+
+ for key, value in labels.items():
+ if isinstance(value, int):
+ worker_labels[key] = WorkerLabels(intValue=value)
+ else:
+ worker_labels[key] = WorkerLabels(strValue=str(value))
+
+ await self.aio_client.UpsertWorkerLabels(
+ UpsertWorkerLabelsRequest(workerId=worker_id, labels=worker_labels),
+ timeout=DEFAULT_REGISTER_TIMEOUT,
+ metadata=get_metadata(self.token),
+ )
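A rough end-to-end sketch of this client, assuming a loaded ClientConfig. GetActionListenerRequest is assumed here to be constructible with the field names the code above reads (worker_name, actions, services, max_runs, labels); the worker and action names are purely illustrative.

import json

from hatchet_sdk.clients.dispatcher.action_listener import GetActionListenerRequest
from hatchet_sdk.clients.dispatcher.dispatcher import DispatcherClient
from hatchet_sdk.contracts.dispatcher_pb2 import STEP_EVENT_TYPE_COMPLETED
from hatchet_sdk.loader import ClientConfig


async def run_worker(config: ClientConfig) -> None:
    dispatcher = DispatcherClient(config)

    # register the worker and obtain a listener bound to its worker id
    listener = await dispatcher.get_action_listener(
        GetActionListenerRequest(
            worker_name="example-worker",
            services=["default"],
            actions=["default:step1"],
            max_runs=5,
            labels={},
        )
    )

    async for action in listener:
        if action is None:
            break
        # ... execute the step here, then report the result back
        await dispatcher.send_step_action_event(
            action, STEP_EVENT_TYPE_COMPLETED, json.dumps({"ok": True})
        )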
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/event_ts.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/event_ts.py
new file mode 100644
index 00000000..1d3c3978
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/event_ts.py
@@ -0,0 +1,28 @@
+import asyncio
+from typing import Any
+
+
+class Event_ts(asyncio.Event):
+ """
+ Event_ts is a subclass of asyncio.Event that allows for thread-safe setting and clearing of the event.
+ """
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ if self._loop is None:
+ self._loop = asyncio.get_event_loop()
+
+ def set(self):
+ if not self._loop.is_closed():
+ self._loop.call_soon_threadsafe(super().set)
+
+ def clear(self):
+ self._loop.call_soon_threadsafe(super().clear)
+
+
+async def read_with_interrupt(listener: Any, interrupt: Event_ts):
+ try:
+ result = await listener.read()
+ return result
+ finally:
+ interrupt.set()
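This helper is the interrupt pattern the action listener above relies on: one task awaits listener.read() and always sets the shared event when it finishes, so a waiter can tell a completed read apart from an external interrupt. A small self-contained sketch; FakeListener is illustrative only.

import asyncio

from hatchet_sdk.clients.event_ts import Event_ts, read_with_interrupt


class FakeListener:
    async def read(self) -> str:
        await asyncio.sleep(0.1)
        return "assigned action"


async def main() -> None:
    interrupt = Event_ts()
    task = asyncio.create_task(read_with_interrupt(FakeListener(), interrupt))

    # wakes up either when the read finishes or when something else
    # calls interrupt.set() (possibly from another thread)
    await interrupt.wait()

    if task.done():
        print("read returned:", task.result())
    else:
        task.cancel()


asyncio.run(main())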
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/events.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/events.py
new file mode 100644
index 00000000..cf6a2721
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/events.py
@@ -0,0 +1,183 @@
+import asyncio
+import datetime
+import json
+from typing import Any, Dict, List, Optional, TypedDict
+from uuid import uuid4
+
+import grpc
+from google.protobuf import timestamp_pb2
+
+from hatchet_sdk.clients.rest.tenacity_utils import tenacity_retry
+from hatchet_sdk.contracts.events_pb2 import (
+ BulkPushEventRequest,
+ Event,
+ PushEventRequest,
+ PutLogRequest,
+ PutStreamEventRequest,
+)
+from hatchet_sdk.contracts.events_pb2_grpc import EventsServiceStub
+from hatchet_sdk.utils.serialization import flatten
+
+from ..loader import ClientConfig
+from ..metadata import get_metadata
+
+
+def new_event(conn, config: ClientConfig) -> "EventClient":
+ return EventClient(
+ client=EventsServiceStub(conn),
+ config=config,
+ )
+
+
+def proto_timestamp_now():
+ t = datetime.datetime.now().timestamp()
+ seconds = int(t)
+ nanos = int(t % 1 * 1e9)
+
+ return timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos)
+
+
+class PushEventOptions(TypedDict, total=False):
+    additional_metadata: Dict[str, str] | None
+    namespace: str | None
+
+
+class BulkPushEventOptions(TypedDict, total=False):
+    namespace: str | None
+
+
+class BulkPushEventWithMetadata(TypedDict, total=False):
+ key: str
+ payload: Any
+ additional_metadata: Optional[Dict[str, Any]] # Optional metadata
+
+
+class EventClient:
+ def __init__(self, client: EventsServiceStub, config: ClientConfig):
+ self.client = client
+ self.token = config.token
+ self.namespace = config.namespace
+
+ async def async_push(
+ self, event_key, payload, options: Optional[PushEventOptions] = None
+ ) -> Event:
+ return await asyncio.to_thread(
+ self.push, event_key=event_key, payload=payload, options=options
+ )
+
+ async def async_bulk_push(
+ self,
+ events: List[BulkPushEventWithMetadata],
+ options: Optional[BulkPushEventOptions] = None,
+ ) -> List[Event]:
+ return await asyncio.to_thread(self.bulk_push, events=events, options=options)
+
+ ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
+ @tenacity_retry
+    def push(self, event_key, payload, options: Optional[PushEventOptions] = None) -> Event:
+ namespace = self.namespace
+
+ if (
+ options is not None
+ and "namespace" in options
+ and options["namespace"] is not None
+ ):
+ namespace = options.pop("namespace")
+
+ namespaced_event_key = namespace + event_key
+
+ try:
+            meta = dict() if options is None else options.get("additional_metadata")
+ meta_bytes = None if meta is None else json.dumps(meta).encode("utf-8")
+ except Exception as e:
+ raise ValueError(f"Error encoding meta: {e}")
+
+ try:
+ payload_bytes = json.dumps(payload).encode("utf-8")
+        except (TypeError, ValueError) as e:
+ raise ValueError(f"Error encoding payload: {e}")
+
+ request = PushEventRequest(
+ key=namespaced_event_key,
+ payload=payload_bytes,
+ eventTimestamp=proto_timestamp_now(),
+ additionalMetadata=meta_bytes,
+ )
+
+ return self.client.Push(request, metadata=get_metadata(self.token))
+
+ ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
+ @tenacity_retry
+ def bulk_push(
+ self,
+ events: List[BulkPushEventWithMetadata],
+        options: Optional[BulkPushEventOptions] = None,
+ ) -> List[Event]:
+ namespace = self.namespace
+
+ if (
+ options is not None
+ and "namespace" in options
+ and options["namespace"] is not None
+ ):
+ namespace = options.pop("namespace")
+
+ bulk_events = []
+ for event in events:
+ event_key = namespace + event["key"]
+ payload = event["payload"]
+
+ try:
+ meta = event.get("additional_metadata", {})
+ meta_bytes = json.dumps(meta).encode("utf-8") if meta else None
+ except Exception as e:
+ raise ValueError(f"Error encoding meta: {e}")
+
+ try:
+ payload_bytes = json.dumps(payload).encode("utf-8")
+            except (TypeError, ValueError) as e:
+ raise ValueError(f"Error encoding payload: {e}")
+
+ request = PushEventRequest(
+ key=event_key,
+ payload=payload_bytes,
+ eventTimestamp=proto_timestamp_now(),
+ additionalMetadata=meta_bytes,
+ )
+ bulk_events.append(request)
+
+ bulk_request = BulkPushEventRequest(events=bulk_events)
+
+ response = self.client.BulkPush(bulk_request, metadata=get_metadata(self.token))
+
+ return response.events
+
+ def log(self, message: str, step_run_id: str):
+ try:
+ request = PutLogRequest(
+ stepRunId=step_run_id,
+ createdAt=proto_timestamp_now(),
+ message=message,
+ )
+
+ self.client.PutLog(request, metadata=get_metadata(self.token))
+ except Exception as e:
+ raise ValueError(f"Error logging: {e}")
+
+ def stream(self, data: str | bytes, step_run_id: str):
+ try:
+ if isinstance(data, str):
+ data_bytes = data.encode("utf-8")
+ elif isinstance(data, bytes):
+ data_bytes = data
+ else:
+ raise ValueError("Invalid data type. Expected str, bytes, or file.")
+
+ request = PutStreamEventRequest(
+ stepRunId=step_run_id,
+ createdAt=proto_timestamp_now(),
+ message=data_bytes,
+ )
+ self.client.PutStreamEvent(request, metadata=get_metadata(self.token))
+ except Exception as e:
+ raise ValueError(f"Error putting stream event: {e}")
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/__init__.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/__init__.py
new file mode 100644
index 00000000..6d8bc27f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/__init__.py
@@ -0,0 +1,293 @@
+# coding: utf-8
+
+# flake8: noqa
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+__version__ = "1.0.0"
+
+# import apis into sdk package
+from hatchet_sdk.clients.rest.api.api_token_api import APITokenApi
+from hatchet_sdk.clients.rest.api.default_api import DefaultApi
+from hatchet_sdk.clients.rest.api.event_api import EventApi
+from hatchet_sdk.clients.rest.api.github_api import GithubApi
+from hatchet_sdk.clients.rest.api.healthcheck_api import HealthcheckApi
+from hatchet_sdk.clients.rest.api.log_api import LogApi
+from hatchet_sdk.clients.rest.api.metadata_api import MetadataApi
+from hatchet_sdk.clients.rest.api.rate_limits_api import RateLimitsApi
+from hatchet_sdk.clients.rest.api.slack_api import SlackApi
+from hatchet_sdk.clients.rest.api.sns_api import SNSApi
+from hatchet_sdk.clients.rest.api.step_run_api import StepRunApi
+from hatchet_sdk.clients.rest.api.tenant_api import TenantApi
+from hatchet_sdk.clients.rest.api.user_api import UserApi
+from hatchet_sdk.clients.rest.api.worker_api import WorkerApi
+from hatchet_sdk.clients.rest.api.workflow_api import WorkflowApi
+from hatchet_sdk.clients.rest.api.workflow_run_api import WorkflowRunApi
+from hatchet_sdk.clients.rest.api_client import ApiClient
+
+# import ApiClient
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.configuration import Configuration
+from hatchet_sdk.clients.rest.exceptions import (
+ ApiAttributeError,
+ ApiException,
+ ApiKeyError,
+ ApiTypeError,
+ ApiValueError,
+ OpenApiException,
+)
+from hatchet_sdk.clients.rest.models.accept_invite_request import AcceptInviteRequest
+
+# import models into sdk package
+from hatchet_sdk.clients.rest.models.api_error import APIError
+from hatchet_sdk.clients.rest.models.api_errors import APIErrors
+from hatchet_sdk.clients.rest.models.api_meta import APIMeta
+from hatchet_sdk.clients.rest.models.api_meta_auth import APIMetaAuth
+from hatchet_sdk.clients.rest.models.api_meta_integration import APIMetaIntegration
+from hatchet_sdk.clients.rest.models.api_meta_posthog import APIMetaPosthog
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.api_token import APIToken
+from hatchet_sdk.clients.rest.models.bulk_create_event_request import (
+ BulkCreateEventRequest,
+)
+from hatchet_sdk.clients.rest.models.cancel_event_request import CancelEventRequest
+from hatchet_sdk.clients.rest.models.concurrency_limit_strategy import (
+ ConcurrencyLimitStrategy,
+)
+from hatchet_sdk.clients.rest.models.create_api_token_request import (
+ CreateAPITokenRequest,
+)
+from hatchet_sdk.clients.rest.models.create_api_token_response import (
+ CreateAPITokenResponse,
+)
+from hatchet_sdk.clients.rest.models.create_cron_workflow_trigger_request import (
+ CreateCronWorkflowTriggerRequest,
+)
+from hatchet_sdk.clients.rest.models.create_event_request import CreateEventRequest
+from hatchet_sdk.clients.rest.models.create_pull_request_from_step_run import (
+ CreatePullRequestFromStepRun,
+)
+from hatchet_sdk.clients.rest.models.create_sns_integration_request import (
+ CreateSNSIntegrationRequest,
+)
+from hatchet_sdk.clients.rest.models.create_tenant_alert_email_group_request import (
+ CreateTenantAlertEmailGroupRequest,
+)
+from hatchet_sdk.clients.rest.models.create_tenant_invite_request import (
+ CreateTenantInviteRequest,
+)
+from hatchet_sdk.clients.rest.models.create_tenant_request import CreateTenantRequest
+from hatchet_sdk.clients.rest.models.cron_workflows import CronWorkflows
+from hatchet_sdk.clients.rest.models.cron_workflows_list import CronWorkflowsList
+from hatchet_sdk.clients.rest.models.cron_workflows_method import CronWorkflowsMethod
+from hatchet_sdk.clients.rest.models.cron_workflows_order_by_field import (
+ CronWorkflowsOrderByField,
+)
+from hatchet_sdk.clients.rest.models.event import Event
+from hatchet_sdk.clients.rest.models.event_data import EventData
+from hatchet_sdk.clients.rest.models.event_key_list import EventKeyList
+from hatchet_sdk.clients.rest.models.event_list import EventList
+from hatchet_sdk.clients.rest.models.event_order_by_direction import (
+ EventOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.event_order_by_field import EventOrderByField
+from hatchet_sdk.clients.rest.models.event_update_cancel200_response import (
+ EventUpdateCancel200Response,
+)
+from hatchet_sdk.clients.rest.models.event_workflow_run_summary import (
+ EventWorkflowRunSummary,
+)
+from hatchet_sdk.clients.rest.models.events import Events
+from hatchet_sdk.clients.rest.models.get_step_run_diff_response import (
+ GetStepRunDiffResponse,
+)
+from hatchet_sdk.clients.rest.models.info_get_version200_response import (
+ InfoGetVersion200Response,
+)
+from hatchet_sdk.clients.rest.models.job import Job
+from hatchet_sdk.clients.rest.models.job_run import JobRun
+from hatchet_sdk.clients.rest.models.job_run_status import JobRunStatus
+from hatchet_sdk.clients.rest.models.list_api_tokens_response import (
+ ListAPITokensResponse,
+)
+from hatchet_sdk.clients.rest.models.list_pull_requests_response import (
+ ListPullRequestsResponse,
+)
+from hatchet_sdk.clients.rest.models.list_slack_webhooks import ListSlackWebhooks
+from hatchet_sdk.clients.rest.models.list_sns_integrations import ListSNSIntegrations
+from hatchet_sdk.clients.rest.models.log_line import LogLine
+from hatchet_sdk.clients.rest.models.log_line_level import LogLineLevel
+from hatchet_sdk.clients.rest.models.log_line_list import LogLineList
+from hatchet_sdk.clients.rest.models.log_line_order_by_direction import (
+ LogLineOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.log_line_order_by_field import LogLineOrderByField
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.pull_request import PullRequest
+from hatchet_sdk.clients.rest.models.pull_request_state import PullRequestState
+from hatchet_sdk.clients.rest.models.queue_metrics import QueueMetrics
+from hatchet_sdk.clients.rest.models.rate_limit import RateLimit
+from hatchet_sdk.clients.rest.models.rate_limit_list import RateLimitList
+from hatchet_sdk.clients.rest.models.rate_limit_order_by_direction import (
+ RateLimitOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.rate_limit_order_by_field import (
+ RateLimitOrderByField,
+)
+from hatchet_sdk.clients.rest.models.recent_step_runs import RecentStepRuns
+from hatchet_sdk.clients.rest.models.reject_invite_request import RejectInviteRequest
+from hatchet_sdk.clients.rest.models.replay_event_request import ReplayEventRequest
+from hatchet_sdk.clients.rest.models.replay_workflow_runs_request import (
+ ReplayWorkflowRunsRequest,
+)
+from hatchet_sdk.clients.rest.models.replay_workflow_runs_response import (
+ ReplayWorkflowRunsResponse,
+)
+from hatchet_sdk.clients.rest.models.rerun_step_run_request import RerunStepRunRequest
+from hatchet_sdk.clients.rest.models.schedule_workflow_run_request import (
+ ScheduleWorkflowRunRequest,
+)
+from hatchet_sdk.clients.rest.models.scheduled_run_status import ScheduledRunStatus
+from hatchet_sdk.clients.rest.models.scheduled_workflows import ScheduledWorkflows
+from hatchet_sdk.clients.rest.models.scheduled_workflows_list import (
+ ScheduledWorkflowsList,
+)
+from hatchet_sdk.clients.rest.models.scheduled_workflows_method import (
+ ScheduledWorkflowsMethod,
+)
+from hatchet_sdk.clients.rest.models.scheduled_workflows_order_by_field import (
+ ScheduledWorkflowsOrderByField,
+)
+from hatchet_sdk.clients.rest.models.semaphore_slots import SemaphoreSlots
+from hatchet_sdk.clients.rest.models.slack_webhook import SlackWebhook
+from hatchet_sdk.clients.rest.models.sns_integration import SNSIntegration
+from hatchet_sdk.clients.rest.models.step import Step
+from hatchet_sdk.clients.rest.models.step_run import StepRun
+from hatchet_sdk.clients.rest.models.step_run_archive import StepRunArchive
+from hatchet_sdk.clients.rest.models.step_run_archive_list import StepRunArchiveList
+from hatchet_sdk.clients.rest.models.step_run_diff import StepRunDiff
+from hatchet_sdk.clients.rest.models.step_run_event import StepRunEvent
+from hatchet_sdk.clients.rest.models.step_run_event_list import StepRunEventList
+from hatchet_sdk.clients.rest.models.step_run_event_reason import StepRunEventReason
+from hatchet_sdk.clients.rest.models.step_run_event_severity import StepRunEventSeverity
+from hatchet_sdk.clients.rest.models.step_run_status import StepRunStatus
+from hatchet_sdk.clients.rest.models.tenant import Tenant
+from hatchet_sdk.clients.rest.models.tenant_alert_email_group import (
+ TenantAlertEmailGroup,
+)
+from hatchet_sdk.clients.rest.models.tenant_alert_email_group_list import (
+ TenantAlertEmailGroupList,
+)
+from hatchet_sdk.clients.rest.models.tenant_alerting_settings import (
+ TenantAlertingSettings,
+)
+from hatchet_sdk.clients.rest.models.tenant_invite import TenantInvite
+from hatchet_sdk.clients.rest.models.tenant_invite_list import TenantInviteList
+from hatchet_sdk.clients.rest.models.tenant_list import TenantList
+from hatchet_sdk.clients.rest.models.tenant_member import TenantMember
+from hatchet_sdk.clients.rest.models.tenant_member_list import TenantMemberList
+from hatchet_sdk.clients.rest.models.tenant_member_role import TenantMemberRole
+from hatchet_sdk.clients.rest.models.tenant_queue_metrics import TenantQueueMetrics
+from hatchet_sdk.clients.rest.models.tenant_resource import TenantResource
+from hatchet_sdk.clients.rest.models.tenant_resource_limit import TenantResourceLimit
+from hatchet_sdk.clients.rest.models.tenant_resource_policy import TenantResourcePolicy
+from hatchet_sdk.clients.rest.models.tenant_step_run_queue_metrics import (
+ TenantStepRunQueueMetrics,
+)
+from hatchet_sdk.clients.rest.models.trigger_workflow_run_request import (
+ TriggerWorkflowRunRequest,
+)
+from hatchet_sdk.clients.rest.models.update_tenant_alert_email_group_request import (
+ UpdateTenantAlertEmailGroupRequest,
+)
+from hatchet_sdk.clients.rest.models.update_tenant_invite_request import (
+ UpdateTenantInviteRequest,
+)
+from hatchet_sdk.clients.rest.models.update_tenant_request import UpdateTenantRequest
+from hatchet_sdk.clients.rest.models.update_worker_request import UpdateWorkerRequest
+from hatchet_sdk.clients.rest.models.user import User
+from hatchet_sdk.clients.rest.models.user_change_password_request import (
+ UserChangePasswordRequest,
+)
+from hatchet_sdk.clients.rest.models.user_login_request import UserLoginRequest
+from hatchet_sdk.clients.rest.models.user_register_request import UserRegisterRequest
+from hatchet_sdk.clients.rest.models.user_tenant_memberships_list import (
+ UserTenantMembershipsList,
+)
+from hatchet_sdk.clients.rest.models.user_tenant_public import UserTenantPublic
+from hatchet_sdk.clients.rest.models.webhook_worker import WebhookWorker
+from hatchet_sdk.clients.rest.models.webhook_worker_create_request import (
+ WebhookWorkerCreateRequest,
+)
+from hatchet_sdk.clients.rest.models.webhook_worker_create_response import (
+ WebhookWorkerCreateResponse,
+)
+from hatchet_sdk.clients.rest.models.webhook_worker_created import WebhookWorkerCreated
+from hatchet_sdk.clients.rest.models.webhook_worker_list_response import (
+ WebhookWorkerListResponse,
+)
+from hatchet_sdk.clients.rest.models.webhook_worker_request import WebhookWorkerRequest
+from hatchet_sdk.clients.rest.models.webhook_worker_request_list_response import (
+ WebhookWorkerRequestListResponse,
+)
+from hatchet_sdk.clients.rest.models.webhook_worker_request_method import (
+ WebhookWorkerRequestMethod,
+)
+from hatchet_sdk.clients.rest.models.worker import Worker
+from hatchet_sdk.clients.rest.models.worker_label import WorkerLabel
+from hatchet_sdk.clients.rest.models.worker_list import WorkerList
+from hatchet_sdk.clients.rest.models.worker_runtime_info import WorkerRuntimeInfo
+from hatchet_sdk.clients.rest.models.worker_runtime_sdks import WorkerRuntimeSDKs
+from hatchet_sdk.clients.rest.models.worker_type import WorkerType
+from hatchet_sdk.clients.rest.models.workflow import Workflow
+from hatchet_sdk.clients.rest.models.workflow_concurrency import WorkflowConcurrency
+from hatchet_sdk.clients.rest.models.workflow_kind import WorkflowKind
+from hatchet_sdk.clients.rest.models.workflow_list import WorkflowList
+from hatchet_sdk.clients.rest.models.workflow_metrics import WorkflowMetrics
+from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun
+from hatchet_sdk.clients.rest.models.workflow_run_list import WorkflowRunList
+from hatchet_sdk.clients.rest.models.workflow_run_order_by_direction import (
+ WorkflowRunOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.workflow_run_order_by_field import (
+ WorkflowRunOrderByField,
+)
+from hatchet_sdk.clients.rest.models.workflow_run_shape import WorkflowRunShape
+from hatchet_sdk.clients.rest.models.workflow_run_status import WorkflowRunStatus
+from hatchet_sdk.clients.rest.models.workflow_run_triggered_by import (
+ WorkflowRunTriggeredBy,
+)
+from hatchet_sdk.clients.rest.models.workflow_runs_cancel_request import (
+ WorkflowRunsCancelRequest,
+)
+from hatchet_sdk.clients.rest.models.workflow_runs_metrics import WorkflowRunsMetrics
+from hatchet_sdk.clients.rest.models.workflow_runs_metrics_counts import (
+ WorkflowRunsMetricsCounts,
+)
+from hatchet_sdk.clients.rest.models.workflow_tag import WorkflowTag
+from hatchet_sdk.clients.rest.models.workflow_trigger_cron_ref import (
+ WorkflowTriggerCronRef,
+)
+from hatchet_sdk.clients.rest.models.workflow_trigger_event_ref import (
+ WorkflowTriggerEventRef,
+)
+from hatchet_sdk.clients.rest.models.workflow_triggers import WorkflowTriggers
+from hatchet_sdk.clients.rest.models.workflow_update_request import (
+ WorkflowUpdateRequest,
+)
+from hatchet_sdk.clients.rest.models.workflow_version import WorkflowVersion
+from hatchet_sdk.clients.rest.models.workflow_version_definition import (
+ WorkflowVersionDefinition,
+)
+from hatchet_sdk.clients.rest.models.workflow_version_meta import WorkflowVersionMeta
+from hatchet_sdk.clients.rest.models.workflow_workers_count import WorkflowWorkersCount
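Everything above is re-exported for convenience, so callers only import from hatchet_sdk.clients.rest. A wiring sketch using the APITokenApi defined in the api_token_api module below; the Configuration parameters (host, access_token) are assumed to match stock openapi-generator output, and the URL/token placeholders are illustrative.

from hatchet_sdk.clients.rest import APITokenApi, ApiClient, Configuration


async def list_tokens(tenant_id: str) -> None:
    configuration = Configuration(host="https://hatchet.example.invalid")
    configuration.access_token = "<api token>"

    api_client = ApiClient(configuration)
    tokens = await APITokenApi(api_client).api_token_list(tenant=tenant_id)
    print(tokens)

# run with: asyncio.run(list_tokens("<36-character tenant id>"))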
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/__init__.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/__init__.py
new file mode 100644
index 00000000..f6ecbe38
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/__init__.py
@@ -0,0 +1,19 @@
+# flake8: noqa
+
+# import apis into api package
+from hatchet_sdk.clients.rest.api.api_token_api import APITokenApi
+from hatchet_sdk.clients.rest.api.default_api import DefaultApi
+from hatchet_sdk.clients.rest.api.event_api import EventApi
+from hatchet_sdk.clients.rest.api.github_api import GithubApi
+from hatchet_sdk.clients.rest.api.healthcheck_api import HealthcheckApi
+from hatchet_sdk.clients.rest.api.log_api import LogApi
+from hatchet_sdk.clients.rest.api.metadata_api import MetadataApi
+from hatchet_sdk.clients.rest.api.rate_limits_api import RateLimitsApi
+from hatchet_sdk.clients.rest.api.slack_api import SlackApi
+from hatchet_sdk.clients.rest.api.sns_api import SNSApi
+from hatchet_sdk.clients.rest.api.step_run_api import StepRunApi
+from hatchet_sdk.clients.rest.api.tenant_api import TenantApi
+from hatchet_sdk.clients.rest.api.user_api import UserApi
+from hatchet_sdk.clients.rest.api.worker_api import WorkerApi
+from hatchet_sdk.clients.rest.api.workflow_api import WorkflowApi
+from hatchet_sdk.clients.rest.api.workflow_run_api import WorkflowRunApi
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/api_token_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/api_token_api.py
new file mode 100644
index 00000000..054ccc6b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/api_token_api.py
@@ -0,0 +1,858 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.models.create_api_token_request import (
+ CreateAPITokenRequest,
+)
+from hatchet_sdk.clients.rest.models.create_api_token_response import (
+ CreateAPITokenResponse,
+)
+from hatchet_sdk.clients.rest.models.list_api_tokens_response import (
+ ListAPITokensResponse,
+)
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class APITokenApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+ @validate_call
+ async def api_token_create(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ create_api_token_request: Optional[CreateAPITokenRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> CreateAPITokenResponse:
+ """Create API Token
+
+ Create an API token for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param create_api_token_request:
+ :type create_api_token_request: CreateAPITokenRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._api_token_create_serialize(
+ tenant=tenant,
+ create_api_token_request=create_api_token_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CreateAPITokenResponse",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def api_token_create_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ create_api_token_request: Optional[CreateAPITokenRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[CreateAPITokenResponse]:
+ """Create API Token
+
+ Create an API token for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param create_api_token_request:
+ :type create_api_token_request: CreateAPITokenRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._api_token_create_serialize(
+ tenant=tenant,
+ create_api_token_request=create_api_token_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CreateAPITokenResponse",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def api_token_create_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ create_api_token_request: Optional[CreateAPITokenRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Create API Token
+
+ Create an API token for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param create_api_token_request:
+ :type create_api_token_request: CreateAPITokenRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._api_token_create_serialize(
+ tenant=tenant,
+ create_api_token_request=create_api_token_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CreateAPITokenResponse",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _api_token_create_serialize(
+ self,
+ tenant,
+ create_api_token_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if create_api_token_request is not None:
+ _body_params = create_api_token_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/api-tokens",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
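Each generated operation comes in three flavors: the plain coroutine returns the deserialized model, the _with_http_info variant returns the full ApiResponse (status code, headers, parsed data), and the _without_preload_content variant returns the raw response without reading the body. A sketch reusing an api_client wired up as in the earlier rest/__init__ example; the CreateAPITokenRequest field name and the raw response's .status attribute are assumptions.

from hatchet_sdk.clients.rest import APITokenApi, CreateAPITokenRequest


async def create_token_variants(api_client, tenant_id: str) -> None:
    api = APITokenApi(api_client)
    req = CreateAPITokenRequest(name="ci-token")  # field name assumed

    # 1) deserialized model only
    created = await api.api_token_create(
        tenant=tenant_id, create_api_token_request=req
    )
    print(created)

    # 2) ApiResponse wrapper with status code, headers, and parsed data
    resp = await api.api_token_create_with_http_info(
        tenant=tenant_id, create_api_token_request=req
    )
    print(resp.status_code, resp.data)

    # 3) raw response; the caller reads/deserializes the body itself
    raw = await api.api_token_create_without_preload_content(
        tenant=tenant_id, create_api_token_request=req
    )
    print(raw.status)  # .status assumed from the generated REST response wrapper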
+
+ @validate_call
+ async def api_token_list(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ListAPITokensResponse:
+ """List API Tokens
+
+ List API tokens for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._api_token_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ListAPITokensResponse",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def api_token_list_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[ListAPITokensResponse]:
+ """List API Tokens
+
+ List API tokens for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._api_token_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ListAPITokensResponse",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def api_token_list_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List API Tokens
+
+ List API tokens for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._api_token_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ListAPITokensResponse",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _api_token_list_serialize(
+ self,
+ tenant,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/api-tokens",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def api_token_update_revoke(
+ self,
+ api_token: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The API token"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Revoke API Token
+
+ Revoke an API token for a tenant
+
+ :param api_token: The API token (required)
+ :type api_token: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._api_token_update_revoke_serialize(
+ api_token=api_token,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def api_token_update_revoke_with_http_info(
+ self,
+ api_token: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The API token"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Revoke API Token
+
+ Revoke an API token for a tenant
+
+ :param api_token: The API token (required)
+ :type api_token: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._api_token_update_revoke_serialize(
+ api_token=api_token,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def api_token_update_revoke_without_preload_content(
+ self,
+ api_token: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The API token"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Revoke API Token
+
+ Revoke an API token for a tenant
+
+ :param api_token: The API token (required)
+ :type api_token: str
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._api_token_update_revoke_serialize(
+ api_token=api_token,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _api_token_update_revoke_serialize(
+ self,
+ api_token,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if api_token is not None:
+ _path_params["api-token"] = api_token
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/api-tokens/{api-token}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
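# --- usage sketch -------------------------------------------------------------
# A minimal example of calling the revoke operation above. The class name
# ApiTokenApi is an assumption based on the file name api_token_api.py (its
# definition sits above this hunk), the token id is a placeholder, and
# ApiClient.get_default() is assumed to already carry the host and credentials.
import asyncio

from hatchet_sdk.clients.rest.api.api_token_api import ApiTokenApi
from hatchet_sdk.clients.rest.api_client import ApiClient


async def revoke_token(token_id: str) -> None:
    api = ApiTokenApi(ApiClient.get_default())
    # Per the response map above, a 204 deserializes to None and the
    # 400/403 error bodies map to APIErrors.
    await api.api_token_update_revoke(api_token=token_id)


# asyncio.run(revoke_token("00000000-0000-0000-0000-000000000000"))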
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/default_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/default_api.py
new file mode 100644
index 00000000..c53e7f3d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/default_api.py
@@ -0,0 +1,2257 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.models.info_get_version200_response import (
+ InfoGetVersion200Response,
+)
+from hatchet_sdk.clients.rest.models.tenant_invite import TenantInvite
+from hatchet_sdk.clients.rest.models.update_tenant_invite_request import (
+ UpdateTenantInviteRequest,
+)
+from hatchet_sdk.clients.rest.models.webhook_worker_create_request import (
+ WebhookWorkerCreateRequest,
+)
+from hatchet_sdk.clients.rest.models.webhook_worker_created import WebhookWorkerCreated
+from hatchet_sdk.clients.rest.models.webhook_worker_list_response import (
+ WebhookWorkerListResponse,
+)
+from hatchet_sdk.clients.rest.models.webhook_worker_request_list_response import (
+ WebhookWorkerRequestListResponse,
+)
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class DefaultApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
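# --- usage sketch -------------------------------------------------------------
# Constructing the wrapper: passing api_client=None falls back to
# ApiClient.get_default(), as the __init__ above shows; configuring that
# default client (host, credentials) is assumed to happen elsewhere.
from hatchet_sdk.clients.rest.api.default_api import DefaultApi
from hatchet_sdk.clients.rest.api_client import ApiClient

default_api = DefaultApi()                          # uses ApiClient.get_default()
explicit_api = DefaultApi(ApiClient.get_default())  # same client, passed explicitly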
+ @validate_call
+ async def info_get_version(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> InfoGetVersion200Response:
+ """We return the version for the currently running server
+
+ Get the version of the server
+
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._info_get_version_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "InfoGetVersion200Response",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def info_get_version_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[InfoGetVersion200Response]:
+ """We return the version for the currently running server
+
+ Get the version of the server
+
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._info_get_version_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "InfoGetVersion200Response",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def info_get_version_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """We return the version for the currently running server
+
+ Get the version of the server
+
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._info_get_version_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "InfoGetVersion200Response",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _info_get_version_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = []
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/version",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
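# --- usage sketch -------------------------------------------------------------
# The three generated variants of the version call above: the plain method
# returns the deserialized InfoGetVersion200Response, *_with_http_info wraps it
# in an ApiResponse, and *_without_preload_content returns the raw response
# without reading the body. The default ApiClient is assumed to already be
# configured with the Hatchet host.
import asyncio

from hatchet_sdk.clients.rest.api.default_api import DefaultApi
from hatchet_sdk.clients.rest.api_client import ApiClient


async def show_version() -> None:
    api = DefaultApi(ApiClient.get_default())
    version = await api.info_get_version()                      # deserialized model
    wrapped = await api.info_get_version_with_http_info()       # ApiResponse wrapper
    raw = await api.info_get_version_without_preload_content()  # body not read yet
    print(version, wrapped, raw)


# asyncio.run(show_version())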
+ @validate_call
+ async def monitoring_post_run_probe(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Detailed Health Probe For the Instance
+
+ Triggers a workflow to check the status of the instance
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._monitoring_post_run_probe_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def monitoring_post_run_probe_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Detailed Health Probe For the Instance
+
+ Triggers a workflow to check the status of the instance
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._monitoring_post_run_probe_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def monitoring_post_run_probe_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Detailed Health Probe For the Instance
+
+ Triggers a workflow to check the status of the instance
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._monitoring_post_run_probe_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _monitoring_post_run_probe_serialize(
+ self,
+ tenant,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/monitoring/{tenant}/probe",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
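# --- usage sketch -------------------------------------------------------------
# Triggering the detailed health probe above; the tenant id is a placeholder
# UUID (the validator requires a 36-character string) and the default ApiClient
# is assumed to be configured.
import asyncio

from hatchet_sdk.clients.rest.api.default_api import DefaultApi
from hatchet_sdk.clients.rest.api_client import ApiClient


async def run_probe(tenant_id: str) -> None:
    api = DefaultApi(ApiClient.get_default())
    # POST /api/v1/monitoring/{tenant}/probe; a 200 deserializes to None.
    await api.monitoring_post_run_probe(tenant=tenant_id)


# asyncio.run(run_probe("00000000-0000-0000-0000-000000000000"))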
+ @validate_call
+ async def tenant_invite_delete(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ tenant_invite: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The tenant invite id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> TenantInvite:
+ """Delete invite
+
+ Deletes a tenant invite
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param tenant_invite: The tenant invite id (required)
+ :type tenant_invite: str
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_delete_serialize(
+ tenant=tenant,
+ tenant_invite=tenant_invite,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantInvite",
+ "400": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def tenant_invite_delete_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ tenant_invite: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The tenant invite id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[TenantInvite]:
+ """Delete invite
+
+ Deletes a tenant invite
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param tenant_invite: The tenant invite id (required)
+ :type tenant_invite: str
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_delete_serialize(
+ tenant=tenant,
+ tenant_invite=tenant_invite,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantInvite",
+ "400": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def tenant_invite_delete_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ tenant_invite: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The tenant invite id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Delete invite
+
+ Deletes a tenant invite
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param tenant_invite: The tenant invite id (required)
+ :type tenant_invite: str
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_delete_serialize(
+ tenant=tenant,
+ tenant_invite=tenant_invite,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantInvite",
+ "400": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _tenant_invite_delete_serialize(
+ self,
+ tenant,
+ tenant_invite,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if tenant_invite is not None:
+ _path_params["tenant-invite"] = tenant_invite
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="DELETE",
+ resource_path="/api/v1/tenants/{tenant}/invites/{tenant-invite}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
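# --- usage sketch -------------------------------------------------------------
# Deleting a tenant invite with the operation above; both ids are placeholder
# UUIDs and the default ApiClient is assumed to be configured.
import asyncio

from hatchet_sdk.clients.rest.api.default_api import DefaultApi
from hatchet_sdk.clients.rest.api_client import ApiClient


async def delete_invite(tenant_id: str, invite_id: str) -> None:
    api = DefaultApi(ApiClient.get_default())
    # A 200 deserializes to the TenantInvite that was removed.
    deleted = await api.tenant_invite_delete(tenant=tenant_id, tenant_invite=invite_id)
    print(deleted)


# asyncio.run(delete_invite("<tenant uuid>", "<invite uuid>"))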
+ @validate_call
+ async def tenant_invite_update(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ tenant_invite: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The tenant invite id",
+ ),
+ ],
+ update_tenant_invite_request: Annotated[
+ UpdateTenantInviteRequest, Field(description="The tenant invite to update")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> TenantInvite:
+ """Update invite
+
+ Updates a tenant invite
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param tenant_invite: The tenant invite id (required)
+ :type tenant_invite: str
+ :param update_tenant_invite_request: The tenant invite to update (required)
+ :type update_tenant_invite_request: UpdateTenantInviteRequest
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_update_serialize(
+ tenant=tenant,
+ tenant_invite=tenant_invite,
+ update_tenant_invite_request=update_tenant_invite_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantInvite",
+ "400": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def tenant_invite_update_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ tenant_invite: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The tenant invite id",
+ ),
+ ],
+ update_tenant_invite_request: Annotated[
+ UpdateTenantInviteRequest, Field(description="The tenant invite to update")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[TenantInvite]:
+ """Update invite
+
+ Updates a tenant invite
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param tenant_invite: The tenant invite id (required)
+ :type tenant_invite: str
+ :param update_tenant_invite_request: The tenant invite to update (required)
+ :type update_tenant_invite_request: UpdateTenantInviteRequest
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_update_serialize(
+ tenant=tenant,
+ tenant_invite=tenant_invite,
+ update_tenant_invite_request=update_tenant_invite_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantInvite",
+ "400": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def tenant_invite_update_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ tenant_invite: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The tenant invite id",
+ ),
+ ],
+ update_tenant_invite_request: Annotated[
+ UpdateTenantInviteRequest, Field(description="The tenant invite to update")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Update invite
+
+ Updates a tenant invite
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param tenant_invite: The tenant invite id (required)
+ :type tenant_invite: str
+ :param update_tenant_invite_request: The tenant invite to update (required)
+ :type update_tenant_invite_request: UpdateTenantInviteRequest
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_update_serialize(
+ tenant=tenant,
+ tenant_invite=tenant_invite,
+ update_tenant_invite_request=update_tenant_invite_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantInvite",
+ "400": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _tenant_invite_update_serialize(
+ self,
+ tenant,
+ tenant_invite,
+ update_tenant_invite_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if tenant_invite is not None:
+ _path_params["tenant-invite"] = tenant_invite
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if update_tenant_invite_request is not None:
+ _body_params = update_tenant_invite_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="PATCH",
+ resource_path="/api/v1/tenants/{tenant}/invites/{tenant-invite}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
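# --- usage sketch -------------------------------------------------------------
# Updating an invite with the operation above. UpdateTenantInviteRequest is
# imported at the top of this file; the role field and its value used here are
# assumptions about that model (see models/update_tenant_invite_request.py),
# and the ids are placeholders.
import asyncio

from hatchet_sdk.clients.rest.api.default_api import DefaultApi
from hatchet_sdk.clients.rest.api_client import ApiClient
from hatchet_sdk.clients.rest.models.update_tenant_invite_request import (
    UpdateTenantInviteRequest,
)


async def update_invite(tenant_id: str, invite_id: str) -> None:
    api = DefaultApi(ApiClient.get_default())
    body = UpdateTenantInviteRequest(role="MEMBER")  # field name/value assumed
    updated = await api.tenant_invite_update(
        tenant=tenant_id,
        tenant_invite=invite_id,
        update_tenant_invite_request=body,
    )
    print(updated)


# asyncio.run(update_invite("<tenant uuid>", "<invite uuid>"))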
+ @validate_call
+ async def webhook_create(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ webhook_worker_create_request: Optional[WebhookWorkerCreateRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> WebhookWorkerCreated:
+ """Create a webhook
+
+ Creates a webhook
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param webhook_worker_create_request:
+ :type webhook_worker_create_request: WebhookWorkerCreateRequest
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._webhook_create_serialize(
+ tenant=tenant,
+ webhook_worker_create_request=webhook_worker_create_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WebhookWorkerCreated",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def webhook_create_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ webhook_worker_create_request: Optional[WebhookWorkerCreateRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[WebhookWorkerCreated]:
+ """Create a webhook
+
+ Creates a webhook
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param webhook_worker_create_request:
+ :type webhook_worker_create_request: WebhookWorkerCreateRequest
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._webhook_create_serialize(
+ tenant=tenant,
+ webhook_worker_create_request=webhook_worker_create_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WebhookWorkerCreated",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def webhook_create_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ webhook_worker_create_request: Optional[WebhookWorkerCreateRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Create a webhook
+
+ Creates a webhook
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param webhook_worker_create_request:
+ :type webhook_worker_create_request: WebhookWorkerCreateRequest
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._webhook_create_serialize(
+ tenant=tenant,
+ webhook_worker_create_request=webhook_worker_create_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WebhookWorkerCreated",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _webhook_create_serialize(
+ self,
+ tenant,
+ webhook_worker_create_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if webhook_worker_create_request is not None:
+ _body_params = webhook_worker_create_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/webhook-workers",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
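# --- usage sketch -------------------------------------------------------------
# Registering a webhook worker with the operation above. WebhookWorkerCreateRequest
# is imported at the top of this file; the field names used here (name, url,
# secret) are assumptions about that model (see
# models/webhook_worker_create_request.py), and the tenant id is a placeholder.
import asyncio

from hatchet_sdk.clients.rest.api.default_api import DefaultApi
from hatchet_sdk.clients.rest.api_client import ApiClient
from hatchet_sdk.clients.rest.models.webhook_worker_create_request import (
    WebhookWorkerCreateRequest,
)


async def register_webhook(tenant_id: str) -> None:
    api = DefaultApi(ApiClient.get_default())
    body = WebhookWorkerCreateRequest(  # field names assumed, check the model
        name="my-webhook-worker",
        url="https://example.com/hatchet/webhook",
        secret="replace-with-a-real-secret",
    )
    created = await api.webhook_create(
        tenant=tenant_id, webhook_worker_create_request=body
    )
    print(created)  # WebhookWorkerCreated on a 200


# asyncio.run(register_webhook("<tenant uuid>"))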
+ @validate_call
+ async def webhook_delete(
+ self,
+ webhook: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The webhook id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Delete a webhook
+
+ Deletes a webhook
+
+ :param webhook: The webhook id (required)
+ :type webhook: str
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._webhook_delete_serialize(
+ webhook=webhook,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def webhook_delete_with_http_info(
+ self,
+ webhook: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The webhook id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Delete a webhook
+
+ Deletes a webhook
+
+ :param webhook: The webhook id (required)
+ :type webhook: str
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._webhook_delete_serialize(
+ webhook=webhook,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def webhook_delete_without_preload_content(
+ self,
+ webhook: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The webhook id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Delete a webhook
+
+ Deletes a webhook
+
+ :param webhook: The webhook id (required)
+ :type webhook: str
+        :param _request_timeout: timeout setting for this request. If one
+            number is provided, it will be the total request
+            timeout. It can also be a pair (tuple) of
+            (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the
+            authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._webhook_delete_serialize(
+ webhook=webhook,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _webhook_delete_serialize(
+ self,
+ webhook,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if webhook is not None:
+ _path_params["webhook"] = webhook
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="DELETE",
+ resource_path="/api/v1/webhook-workers/{webhook}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
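+
+    # A minimal usage sketch (not generated code) for the delete endpoint above. It
+    # assumes "api" is an instance of the API class that defines this method, built
+    # from a configured ApiClient; the UUID is a placeholder.
+    #
+    #   import asyncio
+    #
+    #   async def _delete_demo(api) -> None:
+    #       # DELETE /api/v1/webhook-workers/{webhook}
+    #       await api.webhook_delete(webhook="11111111-1111-1111-1111-111111111111")
+    #
+    #   asyncio.run(_delete_demo(api))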
+
+ @validate_call
+ async def webhook_list(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> WebhookWorkerListResponse:
+ """List webhooks
+
+ Lists all webhooks
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._webhook_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WebhookWorkerListResponse",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def webhook_list_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[WebhookWorkerListResponse]:
+ """List webhooks
+
+ Lists all webhooks
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._webhook_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WebhookWorkerListResponse",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def webhook_list_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List webhooks
+
+ Lists all webhooks
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._webhook_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WebhookWorkerListResponse",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _webhook_list_serialize(
+ self,
+ tenant,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/webhook-workers",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
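+
+    # A minimal usage sketch for the list endpoint above, assuming "api" is an
+    # instance of this API class and that the tenant id below is a placeholder.
+    # The *_with_http_info variant wraps the parsed WebhookWorkerListResponse in
+    # an ApiResponse, which also exposes the HTTP status code and headers.
+    #
+    #   resp = await api.webhook_list_with_http_info(
+    #       tenant="22222222-2222-2222-2222-222222222222",
+    #   )
+    #   print(resp.status_code)   # e.g. 200
+    #   workers = resp.data       # WebhookWorkerListResponse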
+
+ @validate_call
+ async def webhook_requests_list(
+ self,
+ webhook: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The webhook id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> WebhookWorkerRequestListResponse:
+ """List webhook requests
+
+ Lists all requests for a webhook
+
+ :param webhook: The webhook id (required)
+ :type webhook: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._webhook_requests_list_serialize(
+ webhook=webhook,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WebhookWorkerRequestListResponse",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def webhook_requests_list_with_http_info(
+ self,
+ webhook: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The webhook id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[WebhookWorkerRequestListResponse]:
+ """List webhook requests
+
+ Lists all requests for a webhook
+
+ :param webhook: The webhook id (required)
+ :type webhook: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._webhook_requests_list_serialize(
+ webhook=webhook,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WebhookWorkerRequestListResponse",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def webhook_requests_list_without_preload_content(
+ self,
+ webhook: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The webhook id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List webhook requests
+
+ Lists all requests for a webhook
+
+ :param webhook: The webhook id (required)
+ :type webhook: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._webhook_requests_list_serialize(
+ webhook=webhook,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WebhookWorkerRequestListResponse",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _webhook_requests_list_serialize(
+ self,
+ webhook,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if webhook is not None:
+ _path_params["webhook"] = webhook
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/webhook-workers/{webhook}/requests",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
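+
+    # A minimal usage sketch for the requests-list endpoint above, assuming "api"
+    # is an instance of this API class; the webhook id is a placeholder. The
+    # *_without_preload_content variant returns the raw HTTP response without
+    # reading or deserializing the body, leaving that to the caller.
+    #
+    #   raw = await api.webhook_requests_list_without_preload_content(
+    #       webhook="11111111-1111-1111-1111-111111111111",
+    #   )
+    #   body = await raw.read()   # raw JSON bytes, not a parsed model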
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/event_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/event_api.py
new file mode 100644
index 00000000..d66485e3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/event_api.py
@@ -0,0 +1,2548 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.models.bulk_create_event_request import (
+ BulkCreateEventRequest,
+)
+from hatchet_sdk.clients.rest.models.cancel_event_request import CancelEventRequest
+from hatchet_sdk.clients.rest.models.create_event_request import CreateEventRequest
+from hatchet_sdk.clients.rest.models.event import Event
+from hatchet_sdk.clients.rest.models.event_data import EventData
+from hatchet_sdk.clients.rest.models.event_key_list import EventKeyList
+from hatchet_sdk.clients.rest.models.event_list import EventList
+from hatchet_sdk.clients.rest.models.event_order_by_direction import (
+ EventOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.event_order_by_field import EventOrderByField
+from hatchet_sdk.clients.rest.models.event_update_cancel200_response import (
+ EventUpdateCancel200Response,
+)
+from hatchet_sdk.clients.rest.models.events import Events
+from hatchet_sdk.clients.rest.models.replay_event_request import ReplayEventRequest
+from hatchet_sdk.clients.rest.models.workflow_run_status import WorkflowRunStatus
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class EventApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+ @validate_call
+ async def event_create(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ create_event_request: Annotated[
+ CreateEventRequest, Field(description="The event to create")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> Event:
+ """Create event
+
+ Creates a new event.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param create_event_request: The event to create (required)
+ :type create_event_request: CreateEventRequest
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_create_serialize(
+ tenant=tenant,
+ create_event_request=create_event_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Event",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def event_create_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ create_event_request: Annotated[
+ CreateEventRequest, Field(description="The event to create")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[Event]:
+ """Create event
+
+ Creates a new event.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param create_event_request: The event to create (required)
+ :type create_event_request: CreateEventRequest
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_create_serialize(
+ tenant=tenant,
+ create_event_request=create_event_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Event",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def event_create_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ create_event_request: Annotated[
+ CreateEventRequest, Field(description="The event to create")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Create event
+
+ Creates a new event.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param create_event_request: The event to create (required)
+ :type create_event_request: CreateEventRequest
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_create_serialize(
+ tenant=tenant,
+ create_event_request=create_event_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Event",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _event_create_serialize(
+ self,
+ tenant,
+ create_event_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if create_event_request is not None:
+ _body_params = create_event_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/events",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
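+
+    # A minimal usage sketch for event_create, using names visible in this module
+    # (EventApi, ApiClient, CreateEventRequest). The CreateEventRequest field names
+    # ("key", "data") and the tenant id are placeholders assumed for illustration.
+    #
+    #   import asyncio
+    #
+    #   async def _create_demo() -> None:
+    #       api = EventApi(ApiClient.get_default())
+    #       event = await api.event_create(
+    #           tenant="22222222-2222-2222-2222-222222222222",
+    #           create_event_request=CreateEventRequest(key="user:created", data={"id": 1}),
+    #       )
+    #       print(event)
+    #
+    #   asyncio.run(_create_demo())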
+
+ @validate_call
+ async def event_create_bulk(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ bulk_create_event_request: Annotated[
+ BulkCreateEventRequest, Field(description="The events to create")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> Events:
+ """Bulk Create events
+
+ Bulk creates new events.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param bulk_create_event_request: The events to create (required)
+ :type bulk_create_event_request: BulkCreateEventRequest
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_create_bulk_serialize(
+ tenant=tenant,
+ bulk_create_event_request=bulk_create_event_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Events",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def event_create_bulk_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ bulk_create_event_request: Annotated[
+ BulkCreateEventRequest, Field(description="The events to create")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[Events]:
+ """Bulk Create events
+
+ Bulk creates new events.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param bulk_create_event_request: The events to create (required)
+ :type bulk_create_event_request: BulkCreateEventRequest
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_create_bulk_serialize(
+ tenant=tenant,
+ bulk_create_event_request=bulk_create_event_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Events",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def event_create_bulk_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ bulk_create_event_request: Annotated[
+ BulkCreateEventRequest, Field(description="The events to create")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Bulk Create events
+
+ Bulk creates new events.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param bulk_create_event_request: The events to create (required)
+ :type bulk_create_event_request: BulkCreateEventRequest
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_create_bulk_serialize(
+ tenant=tenant,
+ bulk_create_event_request=bulk_create_event_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Events",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _event_create_bulk_serialize(
+ self,
+ tenant,
+ bulk_create_event_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if bulk_create_event_request is not None:
+ _body_params = bulk_create_event_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/events/bulk",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
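+
+    # A minimal usage sketch for event_create_bulk, assuming "api" is an EventApi
+    # instance and that "bulk_request" has already been built from the generated
+    # BulkCreateEventRequest model; the tenant id is a placeholder.
+    #
+    #   events = await api.event_create_bulk(
+    #       tenant="22222222-2222-2222-2222-222222222222",
+    #       bulk_create_event_request=bulk_request,
+    #   )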
+
+ @validate_call
+ async def event_data_get(
+ self,
+ event: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The event id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> EventData:
+ """Get event data
+
+ Get the data for an event.
+
+ :param event: The event id (required)
+ :type event: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_data_get_serialize(
+ event=event,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventData",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def event_data_get_with_http_info(
+ self,
+ event: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The event id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[EventData]:
+ """Get event data
+
+ Get the data for an event.
+
+ :param event: The event id (required)
+ :type event: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_data_get_serialize(
+ event=event,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventData",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def event_data_get_without_preload_content(
+ self,
+ event: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The event id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get event data
+
+ Get the data for an event.
+
+ :param event: The event id (required)
+ :type event: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_data_get_serialize(
+ event=event,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventData",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _event_data_get_serialize(
+ self,
+ event,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if event is not None:
+ _path_params["event"] = event
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/events/{event}/data",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
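+
+    # A minimal usage sketch for event_data_get, assuming "api" is an EventApi
+    # instance; the event id is a placeholder. The call returns the parsed
+    # EventData model for the event.
+    #
+    #   data = await api.event_data_get(
+    #       event="33333333-3333-3333-3333-333333333333",
+    #   )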
+
+ @validate_call
+ async def event_get(
+ self,
+ event: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The event id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> Event:
+ """Get event data
+
+ Get an event.
+
+ :param event: The event id (required)
+ :type event: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_get_serialize(
+ event=event,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Event",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def event_get_with_http_info(
+ self,
+ event: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The event id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[Event]:
+ """Get event data
+
+ Get an event.
+
+ :param event: The event id (required)
+ :type event: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_get_serialize(
+ event=event,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Event",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def event_get_without_preload_content(
+ self,
+ event: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The event id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get event data
+
+ Get an event.
+
+ :param event: The event id (required)
+ :type event: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_get_serialize(
+ event=event,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Event",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _event_get_serialize(
+ self,
+ event,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if event is not None:
+ _path_params["event"] = event
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/events/{event}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
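+
+    # A minimal usage sketch for event_get, assuming "api" is an EventApi instance;
+    # the event id is a placeholder. As with the other operations in this module,
+    # three variants are generated: event_get (parsed model), event_get_with_http_info
+    # (ApiResponse wrapper), and event_get_without_preload_content (raw response).
+    #
+    #   event = await api.event_get(
+    #       event="33333333-3333-3333-3333-333333333333",
+    #   )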
+
+ @validate_call
+ async def event_key_list(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> EventKeyList:
+ """List event keys
+
+ Lists all event keys for a tenant.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is used as the total request timeout. It can
+ also be a pair (tuple) of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication in the spec for
+ a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_key_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventKeyList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def event_key_list_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[EventKeyList]:
+ """List event keys
+
+ Lists all event keys for a tenant.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_key_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventKeyList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def event_key_list_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List event keys
+
+ Lists all event keys for a tenant.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_key_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventKeyList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _event_key_list_serialize(
+ self,
+ tenant,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/events/keys",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
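+
+    # Editor's illustrative sketch (not generated code): listing event keys
+    # for a tenant. It assumes the enclosing class is EventApi and that
+    # Configuration / ApiClient follow the usual OpenAPI-generated pattern
+    # in this package (host plus bearer access token); adjust if they differ.
+    #
+    #     import asyncio
+    #
+    #     from hatchet_sdk.clients.rest.api.event_api import EventApi
+    #     from hatchet_sdk.clients.rest.api_client import ApiClient
+    #     from hatchet_sdk.clients.rest.configuration import Configuration
+    #
+    #     async def main() -> None:
+    #         config = Configuration(host="https://<your hatchet api host>")
+    #         config.access_token = "<api token>"  # bearerAuth
+    #         api_client = ApiClient(config)
+    #         keys = await EventApi(api_client).event_key_list(
+    #             tenant="00000000-0000-0000-0000-000000000000",  # 36-char id
+    #         )
+    #         print(keys)
+    #
+    #     asyncio.run(main())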
+
+ @validate_call
+ async def event_list(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ keys: Annotated[
+ Optional[List[StrictStr]], Field(description="A list of keys to filter by")
+ ] = None,
+ workflows: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of workflow IDs to filter by"),
+ ] = None,
+ statuses: Annotated[
+ Optional[List[WorkflowRunStatus]],
+ Field(description="A list of workflow run statuses to filter by"),
+ ] = None,
+ search: Annotated[
+ Optional[StrictStr], Field(description="The search query to filter for")
+ ] = None,
+ order_by_field: Annotated[
+ Optional[EventOrderByField], Field(description="What to order by")
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[EventOrderByDirection], Field(description="The order direction")
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ event_ids: Annotated[
+ Optional[
+ List[Annotated[str, Field(min_length=36, strict=True, max_length=36)]]
+ ],
+ Field(description="A list of event ids to filter by"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> EventList:
+ """List events
+
+ Lists all events for a tenant.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param keys: A list of keys to filter by
+ :type keys: List[str]
+ :param workflows: A list of workflow IDs to filter by
+ :type workflows: List[str]
+ :param statuses: A list of workflow run statuses to filter by
+ :type statuses: List[WorkflowRunStatus]
+ :param search: The search query to filter for
+ :type search: str
+ :param order_by_field: What to order by
+ :type order_by_field: EventOrderByField
+ :param order_by_direction: The order direction
+ :type order_by_direction: EventOrderByDirection
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param event_ids: A list of event ids to filter by
+ :type event_ids: List[str]
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ keys=keys,
+ workflows=workflows,
+ statuses=statuses,
+ search=search,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ additional_metadata=additional_metadata,
+ event_ids=event_ids,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def event_list_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ keys: Annotated[
+ Optional[List[StrictStr]], Field(description="A list of keys to filter by")
+ ] = None,
+ workflows: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of workflow IDs to filter by"),
+ ] = None,
+ statuses: Annotated[
+ Optional[List[WorkflowRunStatus]],
+ Field(description="A list of workflow run statuses to filter by"),
+ ] = None,
+ search: Annotated[
+ Optional[StrictStr], Field(description="The search query to filter for")
+ ] = None,
+ order_by_field: Annotated[
+ Optional[EventOrderByField], Field(description="What to order by")
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[EventOrderByDirection], Field(description="The order direction")
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ event_ids: Annotated[
+ Optional[
+ List[Annotated[str, Field(min_length=36, strict=True, max_length=36)]]
+ ],
+ Field(description="A list of event ids to filter by"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[EventList]:
+ """List events
+
+ Lists all events for a tenant.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param keys: A list of keys to filter by
+ :type keys: List[str]
+ :param workflows: A list of workflow IDs to filter by
+ :type workflows: List[str]
+ :param statuses: A list of workflow run statuses to filter by
+ :type statuses: List[WorkflowRunStatus]
+ :param search: The search query to filter for
+ :type search: str
+ :param order_by_field: What to order by
+ :type order_by_field: EventOrderByField
+ :param order_by_direction: The order direction
+ :type order_by_direction: EventOrderByDirection
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param event_ids: A list of event ids to filter by
+ :type event_ids: List[str]
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ keys=keys,
+ workflows=workflows,
+ statuses=statuses,
+ search=search,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ additional_metadata=additional_metadata,
+ event_ids=event_ids,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def event_list_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ keys: Annotated[
+ Optional[List[StrictStr]], Field(description="A list of keys to filter by")
+ ] = None,
+ workflows: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of workflow IDs to filter by"),
+ ] = None,
+ statuses: Annotated[
+ Optional[List[WorkflowRunStatus]],
+ Field(description="A list of workflow run statuses to filter by"),
+ ] = None,
+ search: Annotated[
+ Optional[StrictStr], Field(description="The search query to filter for")
+ ] = None,
+ order_by_field: Annotated[
+ Optional[EventOrderByField], Field(description="What to order by")
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[EventOrderByDirection], Field(description="The order direction")
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ event_ids: Annotated[
+ Optional[
+ List[Annotated[str, Field(min_length=36, strict=True, max_length=36)]]
+ ],
+ Field(description="A list of event ids to filter by"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List events
+
+ Lists all events for a tenant.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param keys: A list of keys to filter by
+ :type keys: List[str]
+ :param workflows: A list of workflow IDs to filter by
+ :type workflows: List[str]
+ :param statuses: A list of workflow run statuses to filter by
+ :type statuses: List[WorkflowRunStatus]
+ :param search: The search query to filter for
+ :type search: str
+ :param order_by_field: What to order by
+ :type order_by_field: EventOrderByField
+ :param order_by_direction: The order direction
+ :type order_by_direction: EventOrderByDirection
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param event_ids: A list of event ids to filter by
+ :type event_ids: List[str]
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ keys=keys,
+ workflows=workflows,
+ statuses=statuses,
+ search=search,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ additional_metadata=additional_metadata,
+ event_ids=event_ids,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _event_list_serialize(
+ self,
+ tenant,
+ offset,
+ limit,
+ keys,
+ workflows,
+ statuses,
+ search,
+ order_by_field,
+ order_by_direction,
+ additional_metadata,
+ event_ids,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ "keys": "multi",
+ "workflows": "multi",
+ "statuses": "multi",
+ "additionalMetadata": "multi",
+ "eventIds": "multi",
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+        if offset is not None:
+            _query_params.append(("offset", offset))
+        if limit is not None:
+            _query_params.append(("limit", limit))
+        if keys is not None:
+            _query_params.append(("keys", keys))
+        if workflows is not None:
+            _query_params.append(("workflows", workflows))
+        if statuses is not None:
+            _query_params.append(("statuses", statuses))
+        if search is not None:
+            _query_params.append(("search", search))
+        if order_by_field is not None:
+            _query_params.append(("orderByField", order_by_field.value))
+        if order_by_direction is not None:
+            _query_params.append(("orderByDirection", order_by_direction.value))
+        if additional_metadata is not None:
+            _query_params.append(("additionalMetadata", additional_metadata))
+        if event_ids is not None:
+            _query_params.append(("eventIds", event_ids))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/events",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
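+
+    # Editor's illustrative sketch (not generated code): paging through
+    # events with a key filter, assuming the enclosing class is EventApi and
+    # reusing an already-configured `api_client` (see the sketch above).
+    # The "user:created" key is a hypothetical example value.
+    #
+    #     events = await EventApi(api_client).event_list(
+    #         tenant="00000000-0000-0000-0000-000000000000",
+    #         offset=0,
+    #         limit=50,
+    #         keys=["user:created"],
+    #     )
+    #     print(events)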
+
+ @validate_call
+ async def event_update_cancel(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ cancel_event_request: Annotated[
+            CancelEventRequest, Field(description="The event ids to cancel")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> EventUpdateCancel200Response:
+ """Replay events
+
+ Cancels all runs for a list of events.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+        :param cancel_event_request: The event ids to cancel (required)
+ :type cancel_event_request: CancelEventRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_update_cancel_serialize(
+ tenant=tenant,
+ cancel_event_request=cancel_event_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventUpdateCancel200Response",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def event_update_cancel_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ cancel_event_request: Annotated[
+            CancelEventRequest, Field(description="The event ids to cancel")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[EventUpdateCancel200Response]:
+ """Replay events
+
+ Cancels all runs for a list of events.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+        :param cancel_event_request: The event ids to cancel (required)
+ :type cancel_event_request: CancelEventRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_update_cancel_serialize(
+ tenant=tenant,
+ cancel_event_request=cancel_event_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventUpdateCancel200Response",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def event_update_cancel_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ cancel_event_request: Annotated[
+            CancelEventRequest, Field(description="The event ids to cancel")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Replay events
+
+ Cancels all runs for a list of events.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+        :param cancel_event_request: The event ids to cancel (required)
+ :type cancel_event_request: CancelEventRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_update_cancel_serialize(
+ tenant=tenant,
+ cancel_event_request=cancel_event_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventUpdateCancel200Response",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _event_update_cancel_serialize(
+ self,
+ tenant,
+ cancel_event_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if cancel_event_request is not None:
+ _body_params = cancel_event_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/events/cancel",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
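+
+    # Editor's illustrative sketch (not generated code): cancelling all runs
+    # triggered by a set of events, assuming the enclosing class is EventApi.
+    # The `event_ids` field name on CancelEventRequest is hypothetical here;
+    # check the generated model under clients/rest/models before relying on it.
+    #
+    #     from hatchet_sdk.clients.rest.models import CancelEventRequest
+    #
+    #     result = await EventApi(api_client).event_update_cancel(
+    #         tenant="00000000-0000-0000-0000-000000000000",
+    #         cancel_event_request=CancelEventRequest(
+    #             event_ids=["<36-char event id>"],  # hypothetical field name
+    #         ),
+    #     )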
+
+ @validate_call
+ async def event_update_replay(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ replay_event_request: Annotated[
+ ReplayEventRequest, Field(description="The event ids to replay")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> EventList:
+ """Replay events
+
+ Replays a list of events.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param replay_event_request: The event ids to replay (required)
+ :type replay_event_request: ReplayEventRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_update_replay_serialize(
+ tenant=tenant,
+ replay_event_request=replay_event_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def event_update_replay_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ replay_event_request: Annotated[
+ ReplayEventRequest, Field(description="The event ids to replay")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[EventList]:
+ """Replay events
+
+ Replays a list of events.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param replay_event_request: The event ids to replay (required)
+ :type replay_event_request: ReplayEventRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_update_replay_serialize(
+ tenant=tenant,
+ replay_event_request=replay_event_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def event_update_replay_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ replay_event_request: Annotated[
+ ReplayEventRequest, Field(description="The event ids to replay")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Replay events
+
+ Replays a list of events.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param replay_event_request: The event ids to replay (required)
+ :type replay_event_request: ReplayEventRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._event_update_replay_serialize(
+ tenant=tenant,
+ replay_event_request=replay_event_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _event_update_replay_serialize(
+ self,
+ tenant,
+ replay_event_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if replay_event_request is not None:
+ _body_params = replay_event_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/events/replay",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
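+
+    # Editor's illustrative sketch (not generated code): replaying events is
+    # symmetric with the cancel sketch above, and the same caveat applies to
+    # the assumed `event_ids` field on ReplayEventRequest.
+    #
+    #     from hatchet_sdk.clients.rest.models import ReplayEventRequest
+    #
+    #     replayed = await EventApi(api_client).event_update_replay(
+    #         tenant="00000000-0000-0000-0000-000000000000",
+    #         replay_event_request=ReplayEventRequest(
+    #             event_ids=["<36-char event id>"],  # hypothetical field name
+    #         ),
+    #     )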
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/github_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/github_api.py
new file mode 100644
index 00000000..23c1b269
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/github_api.py
@@ -0,0 +1,331 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class GithubApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+ @validate_call
+ async def sns_update(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ event: Annotated[
+ str,
+ Field(
+ min_length=1, strict=True, max_length=255, description="The event key"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Github app tenant webhook
+
+ SNS event
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param event: The event key (required)
+ :type event: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._sns_update_serialize(
+ tenant=tenant,
+ event=event,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def sns_update_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ event: Annotated[
+ str,
+ Field(
+ min_length=1, strict=True, max_length=255, description="The event key"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Github app tenant webhook
+
+ SNS event
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param event: The event key (required)
+ :type event: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._sns_update_serialize(
+ tenant=tenant,
+ event=event,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def sns_update_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ event: Annotated[
+ str,
+ Field(
+ min_length=1, strict=True, max_length=255, description="The event key"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Github app tenant webhook
+
+ SNS event
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param event: The event key (required)
+ :type event: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._sns_update_serialize(
+ tenant=tenant,
+ event=event,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _sns_update_serialize(
+ self,
+ tenant,
+ event,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if event is not None:
+ _path_params["event"] = event
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = []
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/sns/{tenant}/{event}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
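+
+    # Editor's illustrative sketch (not generated code): posting to the SNS
+    # ingest endpoint served by this class. The operation declares no auth
+    # settings, so only the tenant id and event key are passed; `api_client`
+    # is assumed to be configured as in the EventApi sketches.
+    #
+    #     from hatchet_sdk.clients.rest.api.github_api import GithubApi
+    #
+    #     await GithubApi(api_client).sns_update(
+    #         tenant="00000000-0000-0000-0000-000000000000",
+    #         event="user:created",  # 1-255 character event key
+    #     )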
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/healthcheck_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/healthcheck_api.py
new file mode 100644
index 00000000..4b7793eb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/healthcheck_api.py
@@ -0,0 +1,483 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class HealthcheckApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+ @validate_call
+ async def liveness_get(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Get liveness
+
+ Gets the liveness status
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._liveness_get_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "500": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def liveness_get_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Get liveness
+
+ Gets the liveness status
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._liveness_get_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "500": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def liveness_get_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get liveness
+
+ Gets the liveness status
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._liveness_get_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "500": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _liveness_get_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # authentication setting
+ _auth_settings: List[str] = []
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/live",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def readiness_get(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Get readiness
+
+ Gets the readiness status
+
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._readiness_get_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "500": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def readiness_get_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Get readiness
+
+ Gets the readiness status
+
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._readiness_get_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "500": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def readiness_get_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get readiness
+
+ Gets the readiness status
+
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._readiness_get_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "500": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _readiness_get_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # authentication setting
+ _auth_settings: List[str] = []
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/ready",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
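
The health check operations above follow the three-variant pattern used throughout these generated clients: the plain coroutine deserializes and returns only the .data payload, the _with_http_info variant returns the full ApiResponse wrapper, and the _without_preload_content variant hands back the raw REST response. A minimal, illustrative sketch follows; the HealthcheckApi class and import path are assumptions (only the method bodies appear in this hunk), while readiness_get and readiness_get_with_http_info are the coroutines defined above for GET /api/ready.

# Illustrative sketch only: the HealthcheckApi class/module names are assumed, since
# this hunk shows only the method bodies; the calls target GET /api/live and /api/ready.
import asyncio

from hatchet_sdk.clients.rest.api_client import ApiClient
from hatchet_sdk.clients.rest.api.healthcheck_api import HealthcheckApi  # assumed import path


async def check_health() -> int:
    # Mirrors the __init__ fallback used by the other generated clients.
    health = HealthcheckApi(ApiClient.get_default())
    await health.readiness_get()                         # returns .data, which is None for this endpoint
    info = await health.readiness_get_with_http_info()   # ApiResponse[None] with status code and headers
    return info.status_code


asyncio.run(check_health())
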
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/log_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/log_api.py
new file mode 100644
index 00000000..eaf16677
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/log_api.py
@@ -0,0 +1,447 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.models.log_line_level import LogLineLevel
+from hatchet_sdk.clients.rest.models.log_line_list import LogLineList
+from hatchet_sdk.clients.rest.models.log_line_order_by_direction import (
+ LogLineOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.log_line_order_by_field import LogLineOrderByField
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class LogApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+ @validate_call
+ async def log_line_list(
+ self,
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ levels: Annotated[
+ Optional[List[LogLineLevel]],
+ Field(description="A list of levels to filter by"),
+ ] = None,
+ search: Annotated[
+ Optional[StrictStr], Field(description="The search query to filter for")
+ ] = None,
+ order_by_field: Annotated[
+ Optional[LogLineOrderByField], Field(description="What to order by")
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[LogLineOrderByDirection], Field(description="The order direction")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> LogLineList:
+ """List log lines
+
+ Lists log lines for a step run.
+
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param levels: A list of levels to filter by
+ :type levels: List[LogLineLevel]
+ :param search: The search query to filter for
+ :type search: str
+ :param order_by_field: What to order by
+ :type order_by_field: LogLineOrderByField
+ :param order_by_direction: The order direction
+ :type order_by_direction: LogLineOrderByDirection
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._log_line_list_serialize(
+ step_run=step_run,
+ offset=offset,
+ limit=limit,
+ levels=levels,
+ search=search,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "LogLineList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def log_line_list_with_http_info(
+ self,
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ levels: Annotated[
+ Optional[List[LogLineLevel]],
+ Field(description="A list of levels to filter by"),
+ ] = None,
+ search: Annotated[
+ Optional[StrictStr], Field(description="The search query to filter for")
+ ] = None,
+ order_by_field: Annotated[
+ Optional[LogLineOrderByField], Field(description="What to order by")
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[LogLineOrderByDirection], Field(description="The order direction")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[LogLineList]:
+ """List log lines
+
+ Lists log lines for a step run.
+
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param levels: A list of levels to filter by
+ :type levels: List[LogLineLevel]
+ :param search: The search query to filter for
+ :type search: str
+ :param order_by_field: What to order by
+ :type order_by_field: LogLineOrderByField
+ :param order_by_direction: The order direction
+ :type order_by_direction: LogLineOrderByDirection
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._log_line_list_serialize(
+ step_run=step_run,
+ offset=offset,
+ limit=limit,
+ levels=levels,
+ search=search,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "LogLineList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def log_line_list_without_preload_content(
+ self,
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ levels: Annotated[
+ Optional[List[LogLineLevel]],
+ Field(description="A list of levels to filter by"),
+ ] = None,
+ search: Annotated[
+ Optional[StrictStr], Field(description="The search query to filter for")
+ ] = None,
+ order_by_field: Annotated[
+ Optional[LogLineOrderByField], Field(description="What to order by")
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[LogLineOrderByDirection], Field(description="The order direction")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List log lines
+
+ Lists log lines for a step run.
+
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param levels: A list of levels to filter by
+ :type levels: List[LogLineLevel]
+ :param search: The search query to filter for
+ :type search: str
+ :param order_by_field: What to order by
+ :type order_by_field: LogLineOrderByField
+ :param order_by_direction: The order direction
+ :type order_by_direction: LogLineOrderByDirection
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._log_line_list_serialize(
+ step_run=step_run,
+ offset=offset,
+ limit=limit,
+ levels=levels,
+ search=search,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "LogLineList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _log_line_list_serialize(
+ self,
+ step_run,
+ offset,
+ limit,
+ levels,
+ search,
+ order_by_field,
+ order_by_direction,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ "levels": "multi",
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if step_run is not None:
+ _path_params["step-run"] = step_run
+ # process the query parameters
+ if offset is not None:
+
+ _query_params.append(("offset", offset))
+
+ if limit is not None:
+
+ _query_params.append(("limit", limit))
+
+ if levels is not None:
+
+ _query_params.append(("levels", levels))
+
+ if search is not None:
+
+ _query_params.append(("search", search))
+
+ if order_by_field is not None:
+
+ _query_params.append(("orderByField", order_by_field.value))
+
+ if order_by_direction is not None:
+
+ _query_params.append(("orderByDirection", order_by_direction.value))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/step-runs/{step-run}/logs",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
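
LogApi exposes a single operation, log_line_list, which requires a 36-character step run id as the path parameter and accepts optional offset/limit/levels/search/ordering filters that are serialized as query parameters on GET /api/v1/step-runs/{step-run}/logs. The sketch below is a non-authoritative usage example: the step run id is a placeholder, and it assumes a default ApiClient (server URL plus cookieAuth/bearerAuth credentials) has already been configured elsewhere.

# Sketch only: placeholder step run id; assumes ApiClient.get_default() is already configured.
import asyncio

from hatchet_sdk.clients.rest.api_client import ApiClient
from hatchet_sdk.clients.rest.api.log_api import LogApi


async def fetch_step_run_logs() -> None:
    logs_api = LogApi(ApiClient.get_default())
    page = await logs_api.log_line_list(
        step_run="00000000-0000-0000-0000-000000000000",  # must be exactly 36 characters (pydantic-validated)
        limit=100,                                        # optional pagination
        search="error",                                   # optional free-text filter
    )
    print(page)  # deserialized LogLineList model


asyncio.run(fetch_step_run_logs())
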
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/metadata_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/metadata_api.py
new file mode 100644
index 00000000..61659069
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/metadata_api.py
@@ -0,0 +1,728 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.models.api_errors import APIErrors
+from hatchet_sdk.clients.rest.models.api_meta import APIMeta
+from hatchet_sdk.clients.rest.models.api_meta_integration import APIMetaIntegration
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class MetadataApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+ @validate_call
+ async def cloud_metadata_get(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> APIErrors:
+ """Get cloud metadata
+
+ Gets metadata for the Hatchet cloud instance
+
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._cloud_metadata_get_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "APIErrors",
+ "400": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def cloud_metadata_get_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[APIErrors]:
+ """Get cloud metadata
+
+ Gets metadata for the Hatchet cloud instance
+
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._cloud_metadata_get_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "APIErrors",
+ "400": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def cloud_metadata_get_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get cloud metadata
+
+ Gets metadata for the Hatchet cloud instance
+
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._cloud_metadata_get_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "APIErrors",
+ "400": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _cloud_metadata_get_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = []
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/cloud/metadata",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def metadata_get(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> APIMeta:
+ """Get metadata
+
+ Gets metadata for the Hatchet instance
+
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._metadata_get_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "APIMeta",
+ "400": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def metadata_get_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[APIMeta]:
+ """Get metadata
+
+ Gets metadata for the Hatchet instance
+
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._metadata_get_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "APIMeta",
+ "400": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def metadata_get_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get metadata
+
+ Gets metadata for the Hatchet instance
+
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._metadata_get_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "APIMeta",
+ "400": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _metadata_get_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = []
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/meta",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def metadata_list_integrations(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> List[APIMetaIntegration]:
+ """List integrations
+
+ List all integrations
+
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._metadata_list_integrations_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "List[APIMetaIntegration]",
+ "400": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def metadata_list_integrations_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[List[APIMetaIntegration]]:
+ """List integrations
+
+ List all integrations
+
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._metadata_list_integrations_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "List[APIMetaIntegration]",
+ "400": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def metadata_list_integrations_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List integrations
+
+ List all integrations
+
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._metadata_list_integrations_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "List[APIMetaIntegration]",
+ "400": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _metadata_list_integrations_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/meta/integrations",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
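
MetadataApi bundles three read-only operations: metadata_get (GET /api/v1/meta, deserialized as APIMeta), cloud_metadata_get (GET /api/v1/cloud/metadata, which this spec maps to APIErrors even for a 200), and metadata_list_integrations (GET /api/v1/meta/integrations, deserialized as List[APIMetaIntegration] and the only one of the three that declares cookieAuth/bearerAuth). A hedged sketch, assuming a default ApiClient is already configured:

# Sketch only: assumes the default ApiClient carries the server URL, plus credentials
# for metadata_list_integrations (the two metadata "get" calls declare no auth settings).
import asyncio

from hatchet_sdk.clients.rest.api_client import ApiClient
from hatchet_sdk.clients.rest.api.metadata_api import MetadataApi


async def show_metadata() -> None:
    meta_api = MetadataApi(ApiClient.get_default())
    meta = await meta_api.metadata_get()                        # APIMeta
    integrations = await meta_api.metadata_list_integrations()  # List[APIMetaIntegration]
    print(meta, integrations)


asyncio.run(show_metadata())
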
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/rate_limits_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/rate_limits_api.py
new file mode 100644
index 00000000..c5e7e4ee
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/rate_limits_api.py
@@ -0,0 +1,423 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.models.rate_limit_list import RateLimitList
+from hatchet_sdk.clients.rest.models.rate_limit_order_by_direction import (
+ RateLimitOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.rate_limit_order_by_field import (
+ RateLimitOrderByField,
+)
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class RateLimitsApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+ @validate_call
+ async def rate_limit_list(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ search: Annotated[
+ Optional[StrictStr], Field(description="The search query to filter for")
+ ] = None,
+ order_by_field: Annotated[
+ Optional[RateLimitOrderByField], Field(description="What to order by")
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[RateLimitOrderByDirection],
+ Field(description="The order direction"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RateLimitList:
+ """List rate limits
+
+ Lists all rate limits for a tenant.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param search: The search query to filter for
+ :type search: str
+ :param order_by_field: What to order by
+ :type order_by_field: RateLimitOrderByField
+ :param order_by_direction: The order direction
+ :type order_by_direction: RateLimitOrderByDirection
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._rate_limit_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ search=search,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "RateLimitList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def rate_limit_list_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ search: Annotated[
+ Optional[StrictStr], Field(description="The search query to filter for")
+ ] = None,
+ order_by_field: Annotated[
+ Optional[RateLimitOrderByField], Field(description="What to order by")
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[RateLimitOrderByDirection],
+ Field(description="The order direction"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[RateLimitList]:
+ """List rate limits
+
+ Lists all rate limits for a tenant.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param search: The search query to filter for
+ :type search: str
+ :param order_by_field: What to order by
+ :type order_by_field: RateLimitOrderByField
+ :param order_by_direction: The order direction
+ :type order_by_direction: RateLimitOrderByDirection
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._rate_limit_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ search=search,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "RateLimitList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def rate_limit_list_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ search: Annotated[
+ Optional[StrictStr], Field(description="The search query to filter for")
+ ] = None,
+ order_by_field: Annotated[
+ Optional[RateLimitOrderByField], Field(description="What to order by")
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[RateLimitOrderByDirection],
+ Field(description="The order direction"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List rate limits
+
+ Lists all rate limits for a tenant.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param search: The search query to filter for
+ :type search: str
+ :param order_by_field: What to order by
+ :type order_by_field: RateLimitOrderByField
+ :param order_by_direction: The order direction
+ :type order_by_direction: RateLimitOrderByDirection
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._rate_limit_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ search=search,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "RateLimitList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _rate_limit_list_serialize(
+ self,
+ tenant,
+ offset,
+ limit,
+ search,
+ order_by_field,
+ order_by_direction,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ if offset is not None:
+
+ _query_params.append(("offset", offset))
+
+ if limit is not None:
+
+ _query_params.append(("limit", limit))
+
+ if search is not None:
+
+ _query_params.append(("search", search))
+
+ if order_by_field is not None:
+
+ _query_params.append(("orderByField", order_by_field.value))
+
+ if order_by_direction is not None:
+
+ _query_params.append(("orderByDirection", order_by_direction.value))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/rate-limits",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
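
A minimal sketch of driving the async rate-limit listing shown above. The class name RateLimitsApi is assumed from the rate_limits_api.py module name (its class statement falls outside this hunk), the generated Configuration/ApiClient constructors are assumed to accept the usual host and access_token arguments, and the host, token, and tenant id below are placeholders.

import asyncio

from hatchet_sdk.clients.rest.api_client import ApiClient
from hatchet_sdk.clients.rest.configuration import Configuration
from hatchet_sdk.clients.rest.api.rate_limits_api import RateLimitsApi  # class name assumed


async def main() -> None:
    # Placeholder server URL and bearer token (bearerAuth is one of the listed auth settings).
    config = Configuration(host="https://hatchet.example.invalid", access_token="<api-token>")
    api = RateLimitsApi(ApiClient(config))
    # The *_with_http_info variant returns an ApiResponse wrapper rather than just the model.
    resp = await api.rate_limit_list_with_http_info(
        tenant="00000000-0000-0000-0000-000000000000",  # must be a 36-character id
        limit=10,
    )
    print(resp.status_code, resp.data)  # resp.data is a RateLimitList on HTTP 200


asyncio.run(main())
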
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/slack_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/slack_api.py
new file mode 100644
index 00000000..9b0e637d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/slack_api.py
@@ -0,0 +1,577 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.models.list_slack_webhooks import ListSlackWebhooks
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class SlackApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+ @validate_call
+ async def slack_webhook_delete(
+ self,
+ slack: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The Slack webhook id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Delete Slack webhook
+
+ Delete Slack webhook
+
+ :param slack: The Slack webhook id (required)
+ :type slack: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._slack_webhook_delete_serialize(
+ slack=slack,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def slack_webhook_delete_with_http_info(
+ self,
+ slack: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The Slack webhook id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Delete Slack webhook
+
+ Delete Slack webhook
+
+ :param slack: The Slack webhook id (required)
+ :type slack: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._slack_webhook_delete_serialize(
+ slack=slack,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def slack_webhook_delete_without_preload_content(
+ self,
+ slack: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The Slack webhook id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Delete Slack webhook
+
+ Delete Slack webhook
+
+ :param slack: The Slack webhook id (required)
+ :type slack: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._slack_webhook_delete_serialize(
+ slack=slack,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _slack_webhook_delete_serialize(
+ self,
+ slack,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if slack is not None:
+ _path_params["slack"] = slack
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="DELETE",
+ resource_path="/api/v1/slack/{slack}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def slack_webhook_list(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ListSlackWebhooks:
+ """List Slack integrations
+
+ List Slack webhooks
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._slack_webhook_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ListSlackWebhooks",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def slack_webhook_list_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[ListSlackWebhooks]:
+ """List Slack integrations
+
+ List Slack webhooks
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._slack_webhook_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ListSlackWebhooks",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def slack_webhook_list_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List Slack integrations
+
+ List Slack webhooks
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._slack_webhook_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ListSlackWebhooks",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _slack_webhook_list_serialize(
+ self,
+ tenant,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/slack",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
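
The same pattern applies to SlackApi; a short sketch under the same placeholder assumptions (host, token, tenant, and webhook ids are stand-ins, not real values).

import asyncio

from hatchet_sdk.clients.rest.api_client import ApiClient
from hatchet_sdk.clients.rest.configuration import Configuration
from hatchet_sdk.clients.rest.api.slack_api import SlackApi


async def main() -> None:
    config = Configuration(host="https://hatchet.example.invalid", access_token="<api-token>")
    api = SlackApi(ApiClient(config))
    # The plain method deserializes the body and returns the ListSlackWebhooks model directly.
    webhooks = await api.slack_webhook_list(tenant="00000000-0000-0000-0000-000000000000")
    print(webhooks)
    # Deletion takes the webhook's own 36-character id (placeholder shown here):
    # await api.slack_webhook_delete(slack="<slack-webhook-uuid>")


asyncio.run(main())
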
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/sns_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/sns_api.py
new file mode 100644
index 00000000..bb020ceb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/sns_api.py
@@ -0,0 +1,872 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.models.create_sns_integration_request import (
+ CreateSNSIntegrationRequest,
+)
+from hatchet_sdk.clients.rest.models.list_sns_integrations import ListSNSIntegrations
+from hatchet_sdk.clients.rest.models.sns_integration import SNSIntegration
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class SNSApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+ @validate_call
+ async def sns_create(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ create_sns_integration_request: Optional[CreateSNSIntegrationRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> SNSIntegration:
+ """Create SNS integration
+
+ Create SNS integration
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param create_sns_integration_request:
+ :type create_sns_integration_request: CreateSNSIntegrationRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._sns_create_serialize(
+ tenant=tenant,
+ create_sns_integration_request=create_sns_integration_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "201": "SNSIntegration",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def sns_create_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ create_sns_integration_request: Optional[CreateSNSIntegrationRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[SNSIntegration]:
+ """Create SNS integration
+
+ Create SNS integration
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param create_sns_integration_request:
+ :type create_sns_integration_request: CreateSNSIntegrationRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._sns_create_serialize(
+ tenant=tenant,
+ create_sns_integration_request=create_sns_integration_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "201": "SNSIntegration",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def sns_create_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ create_sns_integration_request: Optional[CreateSNSIntegrationRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Create SNS integration
+
+ Create SNS integration
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param create_sns_integration_request:
+ :type create_sns_integration_request: CreateSNSIntegrationRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._sns_create_serialize(
+ tenant=tenant,
+ create_sns_integration_request=create_sns_integration_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "201": "SNSIntegration",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _sns_create_serialize(
+ self,
+ tenant,
+ create_sns_integration_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if create_sns_integration_request is not None:
+ _body_params = create_sns_integration_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/sns",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def sns_delete(
+ self,
+ sns: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The SNS integration id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Delete SNS integration
+
+ Delete SNS integration
+
+ :param sns: The SNS integration id (required)
+ :type sns: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._sns_delete_serialize(
+ sns=sns,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def sns_delete_with_http_info(
+ self,
+ sns: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The SNS integration id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Delete SNS integration
+
+ Delete SNS integration
+
+ :param sns: The SNS integration id (required)
+ :type sns: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._sns_delete_serialize(
+ sns=sns,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def sns_delete_without_preload_content(
+ self,
+ sns: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The SNS integration id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Delete SNS integration
+
+ Delete SNS integration
+
+ :param sns: The SNS integration id (required)
+ :type sns: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._sns_delete_serialize(
+ sns=sns,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _sns_delete_serialize(
+ self,
+ sns,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if sns is not None:
+ _path_params["sns"] = sns
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="DELETE",
+ resource_path="/api/v1/sns/{sns}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def sns_list(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ListSNSIntegrations:
+ """List SNS integrations
+
+ List SNS integrations
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._sns_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ListSNSIntegrations",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def sns_list_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[ListSNSIntegrations]:
+ """List SNS integrations
+
+ List SNS integrations
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._sns_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ListSNSIntegrations",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def sns_list_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List SNS integrations
+
+ List SNS integrations
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._sns_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ListSNSIntegrations",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _sns_list_serialize(
+ self,
+ tenant,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/sns",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
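
For SNSApi, the *_without_preload_content variant hands back the raw HTTP response so the caller can read or stream the body without model deserialization. A sketch under the same placeholder assumptions, treating the returned object as an aiohttp-style response.

import asyncio

from hatchet_sdk.clients.rest.api_client import ApiClient
from hatchet_sdk.clients.rest.configuration import Configuration
from hatchet_sdk.clients.rest.api.sns_api import SNSApi


async def main() -> None:
    config = Configuration(host="https://hatchet.example.invalid", access_token="<api-token>")
    api = SNSApi(ApiClient(config))
    tenant = "00000000-0000-0000-0000-000000000000"  # placeholder 36-character tenant id
    raw = await api.sns_list_without_preload_content(tenant=tenant)
    body = await raw.read()  # caller reads the body itself; no ListSNSIntegrations parsing
    print(raw.status, len(body))
    # sns_create(tenant=..., create_sns_integration_request=...) POSTs a JSON body, and
    # sns_delete(sns="<integration-uuid>") removes an integration by its 36-character id.


asyncio.run(main())
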
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/step_run_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/step_run_api.py
new file mode 100644
index 00000000..851ed174
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/step_run_api.py
@@ -0,0 +1,2200 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.models.rerun_step_run_request import RerunStepRunRequest
+from hatchet_sdk.clients.rest.models.step_run import StepRun
+from hatchet_sdk.clients.rest.models.step_run_archive_list import StepRunArchiveList
+from hatchet_sdk.clients.rest.models.step_run_event_list import StepRunEventList
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class StepRunApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+ @validate_call
+ async def step_run_get(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> StepRun:
+ """Get step run
+
+ Get a step run by id
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_get_serialize(
+ tenant=tenant,
+ step_run=step_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRun",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def step_run_get_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[StepRun]:
+ """Get step run
+
+ Get a step run by id
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_get_serialize(
+ tenant=tenant,
+ step_run=step_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRun",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def step_run_get_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get step run
+
+ Get a step run by id
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_get_serialize(
+ tenant=tenant,
+ step_run=step_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRun",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _step_run_get_serialize(
+ self,
+ tenant,
+ step_run,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if step_run is not None:
+ _path_params["step-run"] = step_run
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/step-runs/{step-run}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def step_run_get_schema(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> object:
+ """Get step run schema
+
+ Get the schema for a step run
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_get_schema_serialize(
+ tenant=tenant,
+ step_run=step_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "object",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def step_run_get_schema_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[object]:
+ """Get step run schema
+
+ Get the schema for a step run
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_get_schema_serialize(
+ tenant=tenant,
+ step_run=step_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "object",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def step_run_get_schema_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get step run schema
+
+ Get the schema for a step run
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_get_schema_serialize(
+ tenant=tenant,
+ step_run=step_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "object",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _step_run_get_schema_serialize(
+ self,
+ tenant,
+ step_run,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if step_run is not None:
+ _path_params["step-run"] = step_run
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/step-runs/{step-run}/schema",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
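+ # Illustrative sketch (editorial, not generated code): calling the schema
+ # endpoint above from another coroutine on this class. The 36-character ids are
+ # placeholders; this helper is not part of the generated API and is never
+ # invoked by it.
+ async def _example_get_step_run_schema(self) -> object:
+ tenant_id = "00000000-0000-0000-0000-000000000000"  # placeholder UUID
+ step_run_id = "00000000-0000-0000-0000-000000000000"  # placeholder UUID
+ # Returns a plain `object`, since the schema payload is not modelled.
+ return await self.step_run_get_schema(tenant=tenant_id, step_run=step_run_id)
+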
+ @validate_call
+ async def step_run_list_archives(
+ self,
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> StepRunArchiveList:
+ """List archives for step run
+
+ List archives for a step run
+
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_list_archives_serialize(
+ step_run=step_run,
+ offset=offset,
+ limit=limit,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRunArchiveList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def step_run_list_archives_with_http_info(
+ self,
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[StepRunArchiveList]:
+ """List archives for step run
+
+ List archives for a step run
+
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_list_archives_serialize(
+ step_run=step_run,
+ offset=offset,
+ limit=limit,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRunArchiveList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def step_run_list_archives_without_preload_content(
+ self,
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List archives for step run
+
+ List archives for a step run
+
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_list_archives_serialize(
+ step_run=step_run,
+ offset=offset,
+ limit=limit,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRunArchiveList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _step_run_list_archives_serialize(
+ self,
+ step_run,
+ offset,
+ limit,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if step_run is not None:
+ _path_params["step-run"] = step_run
+ # process the query parameters
+ if offset is not None:
+ _query_params.append(("offset", offset))
+ if limit is not None:
+ _query_params.append(("limit", limit))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/step-runs/{step-run}/archives",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
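+ # Illustrative sketch (editorial, not generated code): paging through archives
+ # with the offset/limit parameters accepted above. The page size of 100 is an
+ # arbitrary choice, and `rows` is an assumed attribute of StepRunArchiveList,
+ # so it is read defensively; the same pattern applies to step_run_list_events.
+ async def _example_list_all_archives(self, step_run_id: str) -> list:
+ archives: list = []
+ offset, limit = 0, 100  # arbitrary page size, not an API default
+ while True:
+ page = await self.step_run_list_archives(
+ step_run=step_run_id, offset=offset, limit=limit
+ )
+ rows = getattr(page, "rows", None) or []  # `rows` is assumed, not confirmed
+ archives.extend(rows)
+ if len(rows) < limit:
+ break
+ offset += limit
+ return archives
+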
+ @validate_call
+ async def step_run_list_events(
+ self,
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> StepRunEventList:
+ """List events for step run
+
+ List events for a step run
+
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_list_events_serialize(
+ step_run=step_run,
+ offset=offset,
+ limit=limit,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRunEventList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def step_run_list_events_with_http_info(
+ self,
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[StepRunEventList]:
+ """List events for step run
+
+ List events for a step run
+
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_list_events_serialize(
+ step_run=step_run,
+ offset=offset,
+ limit=limit,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRunEventList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def step_run_list_events_without_preload_content(
+ self,
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List events for step run
+
+ List events for a step run
+
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_list_events_serialize(
+ step_run=step_run,
+ offset=offset,
+ limit=limit,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRunEventList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _step_run_list_events_serialize(
+ self,
+ step_run,
+ offset,
+ limit,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if step_run is not None:
+ _path_params["step-run"] = step_run
+ # process the query parameters
+ if offset is not None:
+ _query_params.append(("offset", offset))
+ if limit is not None:
+ _query_params.append(("limit", limit))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/step-runs/{step-run}/events",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
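+ # Editorial note: step_run_list_events pages exactly like step_run_list_archives;
+ # the offset/limit sketch following `_step_run_list_archives_serialize` above
+ # applies here unchanged, with StepRunEventList in place of StepRunArchiveList.
+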
+ @validate_call
+ async def step_run_update_cancel(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> StepRun:
+ """Attempts to cancel a step run
+
+ Attempts to cancel a step run
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_update_cancel_serialize(
+ tenant=tenant,
+ step_run=step_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRun",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def step_run_update_cancel_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[StepRun]:
+ """Attempts to cancel a step run
+
+ Attempts to cancel a step run
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_update_cancel_serialize(
+ tenant=tenant,
+ step_run=step_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRun",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def step_run_update_cancel_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Attempts to cancel a step run
+
+ Attempts to cancel a step run
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_update_cancel_serialize(
+ tenant=tenant,
+ step_run=step_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRun",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _step_run_update_cancel_serialize(
+ self,
+ tenant,
+ step_run,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if step_run is not None:
+ _path_params["step-run"] = step_run
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/step-runs/{step-run}/cancel",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
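+ # Illustrative sketch (editorial, not generated code): requesting cancellation
+ # via the POST endpoint above. Cancellation is best-effort ("attempts to
+ # cancel"), so callers should inspect the returned StepRun rather than assume
+ # the run was actually stopped.
+ async def _example_cancel_step_run(self, tenant_id: str, step_run_id: str) -> StepRun:
+ return await self.step_run_update_cancel(tenant=tenant_id, step_run=step_run_id)
+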
+ @validate_call
+ async def step_run_update_rerun(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ rerun_step_run_request: Annotated[
+ RerunStepRunRequest, Field(description="The input to the rerun")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> StepRun:
+ """Rerun step run
+
+ Reruns a step run
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param rerun_step_run_request: The input to the rerun (required)
+ :type rerun_step_run_request: RerunStepRunRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_update_rerun_serialize(
+ tenant=tenant,
+ step_run=step_run,
+ rerun_step_run_request=rerun_step_run_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRun",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def step_run_update_rerun_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ rerun_step_run_request: Annotated[
+ RerunStepRunRequest, Field(description="The input to the rerun")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[StepRun]:
+ """Rerun step run
+
+ Reruns a step run
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param rerun_step_run_request: The input to the rerun (required)
+ :type rerun_step_run_request: RerunStepRunRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_update_rerun_serialize(
+ tenant=tenant,
+ step_run=step_run,
+ rerun_step_run_request=rerun_step_run_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRun",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def step_run_update_rerun_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ step_run: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The step run id"
+ ),
+ ],
+ rerun_step_run_request: Annotated[
+ RerunStepRunRequest, Field(description="The input to the rerun")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Rerun step run
+
+ Reruns a step run
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param step_run: The step run id (required)
+ :type step_run: str
+ :param rerun_step_run_request: The input to the rerun (required)
+ :type rerun_step_run_request: RerunStepRunRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._step_run_update_rerun_serialize(
+ tenant=tenant,
+ step_run=step_run,
+ rerun_step_run_request=rerun_step_run_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRun",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _step_run_update_rerun_serialize(
+ self,
+ tenant,
+ step_run,
+ rerun_step_run_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if step_run is not None:
+ _path_params["step-run"] = step_run
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if rerun_step_run_request is not None:
+ _body_params = rerun_step_run_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/step-runs/{step-run}/rerun",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
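+ # Illustrative sketch (editorial, not generated code): re-running a step with a
+ # modified input. The `input=` keyword on RerunStepRunRequest is an assumption
+ # about the generated model's field name, not something confirmed by this diff;
+ # substitute the real field if it differs.
+ async def _example_rerun_step_run(self, tenant_id: str, step_run_id: str) -> StepRun:
+ request = RerunStepRunRequest(input={"retry": True})  # field name assumed
+ return await self.step_run_update_rerun(
+ tenant=tenant_id,
+ step_run=step_run_id,
+ rerun_step_run_request=request,
+ )
+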
+ @validate_call
+ async def workflow_run_list_step_run_events(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The workflow run id",
+ ),
+ ],
+ last_id: Annotated[
+ Optional[StrictInt], Field(description="Last ID of the last event")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> StepRunEventList:
+ """List events for all step runs for a workflow run
+
+ List events for all step runs for a workflow run
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow_run: The workflow run id (required)
+ :type workflow_run: str
+ :param last_id: Last ID of the last event
+ :type last_id: int
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_list_step_run_events_serialize(
+ tenant=tenant,
+ workflow_run=workflow_run,
+ last_id=last_id,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRunEventList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_run_list_step_run_events_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The workflow run id",
+ ),
+ ],
+ last_id: Annotated[
+ Optional[StrictInt], Field(description="Last ID of the last event")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[StepRunEventList]:
+ """List events for all step runs for a workflow run
+
+ List events for all step runs for a workflow run
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow_run: The workflow run id (required)
+ :type workflow_run: str
+ :param last_id: Last ID of the last event
+ :type last_id: int
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_list_step_run_events_serialize(
+ tenant=tenant,
+ workflow_run=workflow_run,
+ last_id=last_id,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRunEventList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_run_list_step_run_events_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The workflow run id",
+ ),
+ ],
+ last_id: Annotated[
+ Optional[StrictInt], Field(description="Last ID of the last event")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List events for all step runs for a workflow run
+
+ List events for all step runs for a workflow run
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow_run: The workflow run id (required)
+ :type workflow_run: str
+ :param last_id: Last ID of the last event
+ :type last_id: int
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_list_step_run_events_serialize(
+ tenant=tenant,
+ workflow_run=workflow_run,
+ last_id=last_id,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "StepRunEventList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_run_list_step_run_events_serialize(
+ self,
+ tenant,
+ workflow_run,
+ last_id,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if workflow_run is not None:
+ _path_params["workflow-run"] = workflow_run
+ # process the query parameters
+ if last_id is not None:
+ _query_params.append(("lastId", last_id))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/workflow-runs/{workflow-run}/step-run-events",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
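+
+ # Illustrative sketch (editorial, not generated code): incremental polling of
+ # step-run events for a workflow run using the `last_id` cursor accepted above.
+ # `rows` and `id` are assumed attributes of the generated models, so both are
+ # read defensively; the two-second interval is arbitrary.
+ async def _example_poll_step_run_events(self, tenant_id: str, workflow_run_id: str):
+ import asyncio  # stdlib; imported locally to keep this sketch self-contained
+
+ last_id = None
+ while True:
+ page = await self.workflow_run_list_step_run_events(
+ tenant=tenant_id, workflow_run=workflow_run_id, last_id=last_id
+ )
+ rows = getattr(page, "rows", None) or []
+ for event in rows:
+ yield event
+ if rows:
+ last_id = getattr(rows[-1], "id", last_id)
+ await asyncio.sleep(2)  # arbitrary poll interval, not an API requirement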
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/tenant_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/tenant_api.py
new file mode 100644
index 00000000..cd5e4f07
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/tenant_api.py
@@ -0,0 +1,4428 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.models.accept_invite_request import AcceptInviteRequest
+from hatchet_sdk.clients.rest.models.create_tenant_alert_email_group_request import (
+ CreateTenantAlertEmailGroupRequest,
+)
+from hatchet_sdk.clients.rest.models.create_tenant_invite_request import (
+ CreateTenantInviteRequest,
+)
+from hatchet_sdk.clients.rest.models.create_tenant_request import CreateTenantRequest
+from hatchet_sdk.clients.rest.models.reject_invite_request import RejectInviteRequest
+from hatchet_sdk.clients.rest.models.tenant import Tenant
+from hatchet_sdk.clients.rest.models.tenant_alert_email_group import (
+ TenantAlertEmailGroup,
+)
+from hatchet_sdk.clients.rest.models.tenant_alert_email_group_list import (
+ TenantAlertEmailGroupList,
+)
+from hatchet_sdk.clients.rest.models.tenant_alerting_settings import (
+ TenantAlertingSettings,
+)
+from hatchet_sdk.clients.rest.models.tenant_invite import TenantInvite
+from hatchet_sdk.clients.rest.models.tenant_invite_list import TenantInviteList
+from hatchet_sdk.clients.rest.models.tenant_member import TenantMember
+from hatchet_sdk.clients.rest.models.tenant_member_list import TenantMemberList
+from hatchet_sdk.clients.rest.models.tenant_resource_policy import TenantResourcePolicy
+from hatchet_sdk.clients.rest.models.tenant_step_run_queue_metrics import (
+ TenantStepRunQueueMetrics,
+)
+from hatchet_sdk.clients.rest.models.update_tenant_alert_email_group_request import (
+ UpdateTenantAlertEmailGroupRequest,
+)
+from hatchet_sdk.clients.rest.models.update_tenant_request import UpdateTenantRequest
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class TenantApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
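+
+ # Note on the generated method variants (descriptive comment, not generated
+ # code): each endpoint below is exposed three ways. The plain coroutine
+ # (e.g. alert_email_group_create) reads and deserializes the body and returns
+ # the model; *_with_http_info returns an ApiResponse wrapper whose .data field
+ # carries the same model; *_without_preload_content returns the raw HTTP
+ # response without reading or deserializing the body.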
+
+ @validate_call
+ async def alert_email_group_create(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ create_tenant_alert_email_group_request: Annotated[
+ CreateTenantAlertEmailGroupRequest,
+ Field(description="The tenant alert email group to create"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> TenantAlertEmailGroup:
+ """Create tenant alert email group
+
+ Creates a new tenant alert email group
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param create_tenant_alert_email_group_request: The tenant alert email group to create (required)
+ :type create_tenant_alert_email_group_request: CreateTenantAlertEmailGroupRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._alert_email_group_create_serialize(
+ tenant=tenant,
+ create_tenant_alert_email_group_request=create_tenant_alert_email_group_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "201": "TenantAlertEmailGroup",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def alert_email_group_create_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ create_tenant_alert_email_group_request: Annotated[
+ CreateTenantAlertEmailGroupRequest,
+ Field(description="The tenant alert email group to create"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[TenantAlertEmailGroup]:
+ """Create tenant alert email group
+
+ Creates a new tenant alert email group
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param create_tenant_alert_email_group_request: The tenant alert email group to create (required)
+ :type create_tenant_alert_email_group_request: CreateTenantAlertEmailGroupRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._alert_email_group_create_serialize(
+ tenant=tenant,
+ create_tenant_alert_email_group_request=create_tenant_alert_email_group_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "201": "TenantAlertEmailGroup",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def alert_email_group_create_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ create_tenant_alert_email_group_request: Annotated[
+ CreateTenantAlertEmailGroupRequest,
+ Field(description="The tenant alert email group to create"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Create tenant alert email group
+
+ Creates a new tenant alert email group
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param create_tenant_alert_email_group_request: The tenant alert email group to create (required)
+ :type create_tenant_alert_email_group_request: CreateTenantAlertEmailGroupRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._alert_email_group_create_serialize(
+ tenant=tenant,
+ create_tenant_alert_email_group_request=create_tenant_alert_email_group_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "201": "TenantAlertEmailGroup",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _alert_email_group_create_serialize(
+ self,
+ tenant,
+ create_tenant_alert_email_group_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if create_tenant_alert_email_group_request is not None:
+ _body_params = create_tenant_alert_email_group_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/alerting-email-groups",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
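+
+ # Usage sketch (illustrative only, not generated code): creating an alert
+ # email group and receiving the deserialized TenantAlertEmailGroup. The
+ # request model's field names are an assumption; check
+ # CreateTenantAlertEmailGroupRequest for the actual schema.
+ #
+ #     api = TenantApi(ApiClient.get_default())
+ #     group = await api.alert_email_group_create(
+ #         tenant=tenant_id,  # 36-character tenant id
+ #         create_tenant_alert_email_group_request=CreateTenantAlertEmailGroupRequest(
+ #             emails=["oncall@example.com"],  # field name assumed
+ #         ),
+ #     )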
+
+ @validate_call
+ async def alert_email_group_delete(
+ self,
+ alert_email_group: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The tenant alert email group id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Delete tenant alert email group
+
+ Deletes a tenant alert email group
+
+ :param alert_email_group: The tenant alert email group id (required)
+ :type alert_email_group: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._alert_email_group_delete_serialize(
+ alert_email_group=alert_email_group,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def alert_email_group_delete_with_http_info(
+ self,
+ alert_email_group: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The tenant alert email group id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Delete tenant alert email group
+
+ Deletes a tenant alert email group
+
+ :param alert_email_group: The tenant alert email group id (required)
+ :type alert_email_group: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._alert_email_group_delete_serialize(
+ alert_email_group=alert_email_group,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def alert_email_group_delete_without_preload_content(
+ self,
+ alert_email_group: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The tenant alert email group id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Delete tenant alert email group
+
+ Deletes a tenant alert email group
+
+ :param alert_email_group: The tenant alert email group id (required)
+ :type alert_email_group: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._alert_email_group_delete_serialize(
+ alert_email_group=alert_email_group,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _alert_email_group_delete_serialize(
+ self,
+ alert_email_group,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if alert_email_group is not None:
+ _path_params["alert-email-group"] = alert_email_group
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="DELETE",
+ resource_path="/api/v1/alerting-email-groups/{alert-email-group}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
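+
+ # Usage sketch (illustrative only, not generated code): deleting a group via
+ # the *_with_http_info variant so the status code can be checked; a 204 maps
+ # to None in the response types above. The status_code attribute on
+ # ApiResponse is assumed from the generated wrapper.
+ #
+ #     api = TenantApi(ApiClient.get_default())
+ #     resp = await api.alert_email_group_delete_with_http_info(
+ #         alert_email_group=group_id,  # 36-character alert email group id
+ #     )
+ #     assert resp.status_code == 204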
+
+ @validate_call
+ async def alert_email_group_list(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> TenantAlertEmailGroupList:
+ """List tenant alert email groups
+
+ Gets a list of tenant alert email groups
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._alert_email_group_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantAlertEmailGroupList",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def alert_email_group_list_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[TenantAlertEmailGroupList]:
+ """List tenant alert email groups
+
+ Gets a list of tenant alert email groups
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._alert_email_group_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantAlertEmailGroupList",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def alert_email_group_list_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List tenant alert email groups
+
+ Gets a list of tenant alert email groups
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._alert_email_group_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantAlertEmailGroupList",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _alert_email_group_list_serialize(
+ self,
+ tenant,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/alerting-email-groups",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
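+
+ # Usage sketch (illustrative only, not generated code): when the raw HTTP
+ # response is wanted instead of a deserialized TenantAlertEmailGroupList, use
+ # the *_without_preload_content variant. How the body is read depends on the
+ # underlying HTTP client, so the read() call below is an assumption.
+ #
+ #     api = TenantApi(ApiClient.get_default())
+ #     raw = await api.alert_email_group_list_without_preload_content(
+ #         tenant=tenant_id,
+ #     )
+ #     body = await raw.read()  # assumed aiohttp-style response object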
+
+ @validate_call
+ async def alert_email_group_update(
+ self,
+ alert_email_group: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The tenant alert email group id",
+ ),
+ ],
+ update_tenant_alert_email_group_request: Annotated[
+ UpdateTenantAlertEmailGroupRequest,
+ Field(description="The tenant alert email group to update"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> TenantAlertEmailGroup:
+ """Update tenant alert email group
+
+ Updates a tenant alert email group
+
+ :param alert_email_group: The tenant alert email group id (required)
+ :type alert_email_group: str
+ :param update_tenant_alert_email_group_request: The tenant alert email group to update (required)
+ :type update_tenant_alert_email_group_request: UpdateTenantAlertEmailGroupRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._alert_email_group_update_serialize(
+ alert_email_group=alert_email_group,
+ update_tenant_alert_email_group_request=update_tenant_alert_email_group_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantAlertEmailGroup",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def alert_email_group_update_with_http_info(
+ self,
+ alert_email_group: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The tenant alert email group id",
+ ),
+ ],
+ update_tenant_alert_email_group_request: Annotated[
+ UpdateTenantAlertEmailGroupRequest,
+ Field(description="The tenant alert email group to update"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[TenantAlertEmailGroup]:
+ """Update tenant alert email group
+
+ Updates a tenant alert email group
+
+ :param alert_email_group: The tenant alert email group id (required)
+ :type alert_email_group: str
+ :param update_tenant_alert_email_group_request: The tenant alert email group to update (required)
+ :type update_tenant_alert_email_group_request: UpdateTenantAlertEmailGroupRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._alert_email_group_update_serialize(
+ alert_email_group=alert_email_group,
+ update_tenant_alert_email_group_request=update_tenant_alert_email_group_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantAlertEmailGroup",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def alert_email_group_update_without_preload_content(
+ self,
+ alert_email_group: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The tenant alert email group id",
+ ),
+ ],
+ update_tenant_alert_email_group_request: Annotated[
+ UpdateTenantAlertEmailGroupRequest,
+ Field(description="The tenant alert email group to update"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Update tenant alert email group
+
+ Updates a tenant alert email group
+
+ :param alert_email_group: The tenant alert email group id (required)
+ :type alert_email_group: str
+ :param update_tenant_alert_email_group_request: The tenant alert email group to update (required)
+ :type update_tenant_alert_email_group_request: UpdateTenantAlertEmailGroupRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._alert_email_group_update_serialize(
+ alert_email_group=alert_email_group,
+ update_tenant_alert_email_group_request=update_tenant_alert_email_group_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantAlertEmailGroup",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _alert_email_group_update_serialize(
+ self,
+ alert_email_group,
+ update_tenant_alert_email_group_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if alert_email_group is not None:
+ _path_params["alert-email-group"] = alert_email_group
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if update_tenant_alert_email_group_request is not None:
+ _body_params = update_tenant_alert_email_group_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="PATCH",
+ resource_path="/api/v1/alerting-email-groups/{alert-email-group}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
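+
+ # Usage sketch (illustrative only, not generated code): updating a group with
+ # a per-request timeout. Per the docstrings, a single number is a total
+ # timeout and a tuple is (connection, read) timeouts. The request model's
+ # field names are an assumption.
+ #
+ #     api = TenantApi(ApiClient.get_default())
+ #     updated = await api.alert_email_group_update(
+ #         alert_email_group=group_id,
+ #         update_tenant_alert_email_group_request=UpdateTenantAlertEmailGroupRequest(
+ #             emails=["alerts@example.com"],  # field name assumed
+ #         ),
+ #         _request_timeout=(3.0, 10.0),  # (connection, read) seconds
+ #     )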
+
+ @validate_call
+ async def tenant_alerting_settings_get(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> TenantAlertingSettings:
+ """Get tenant alerting settings
+
+ Gets the alerting settings for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_alerting_settings_get_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantAlertingSettings",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def tenant_alerting_settings_get_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[TenantAlertingSettings]:
+ """Get tenant alerting settings
+
+ Gets the alerting settings for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_alerting_settings_get_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantAlertingSettings",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def tenant_alerting_settings_get_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get tenant alerting settings
+
+ Gets the alerting settings for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_alerting_settings_get_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantAlertingSettings",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _tenant_alerting_settings_get_serialize(
+ self,
+ tenant,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/alerting/settings",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
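+
+ # Usage sketch (illustrative only, not generated code): per-request overrides.
+ # _headers replaces the spec-derived headers for this call only, and
+ # _request_auth can likewise replace the cookieAuth/bearerAuth settings for a
+ # single call. The header name below is purely illustrative.
+ #
+ #     api = TenantApi(ApiClient.get_default())
+ #     settings = await api.tenant_alerting_settings_get(
+ #         tenant=tenant_id,
+ #         _headers={"X-Request-Id": "debug-123"},
+ #     )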
+
+ @validate_call
+ async def tenant_create(
+ self,
+ create_tenant_request: Annotated[
+ CreateTenantRequest, Field(description="The tenant to create")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> Tenant:
+ """Create tenant
+
+ Creates a new tenant
+
+ :param create_tenant_request: The tenant to create (required)
+ :type create_tenant_request: CreateTenantRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_create_serialize(
+ create_tenant_request=create_tenant_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Tenant",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def tenant_create_with_http_info(
+ self,
+ create_tenant_request: Annotated[
+ CreateTenantRequest, Field(description="The tenant to create")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[Tenant]:
+ """Create tenant
+
+ Creates a new tenant
+
+ :param create_tenant_request: The tenant to create (required)
+ :type create_tenant_request: CreateTenantRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_create_serialize(
+ create_tenant_request=create_tenant_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Tenant",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def tenant_create_without_preload_content(
+ self,
+ create_tenant_request: Annotated[
+ CreateTenantRequest, Field(description="The tenant to create")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Create tenant
+
+ Creates a new tenant
+
+ :param create_tenant_request: The tenant to create (required)
+ :type create_tenant_request: CreateTenantRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_create_serialize(
+ create_tenant_request=create_tenant_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Tenant",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _tenant_create_serialize(
+ self,
+ create_tenant_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if create_tenant_request is not None:
+ _body_params = create_tenant_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
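+
+ # Usage sketch (illustrative only, not generated code): creating a tenant.
+ # Unlike most endpoints in this class, tenant_create takes no tenant path
+ # parameter and posts to /api/v1/tenants. The CreateTenantRequest field names
+ # below are assumptions; check the model for the real schema.
+ #
+ #     api = TenantApi(ApiClient.get_default())
+ #     tenant = await api.tenant_create(
+ #         create_tenant_request=CreateTenantRequest(
+ #             name="Acme", slug="acme",  # field names assumed
+ #         ),
+ #     )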
+
+ @validate_call
+ async def tenant_get_step_run_queue_metrics(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> TenantStepRunQueueMetrics:
+ """Get step run metrics
+
+ Get the queue metrics for the tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_get_step_run_queue_metrics_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantStepRunQueueMetrics",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def tenant_get_step_run_queue_metrics_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[TenantStepRunQueueMetrics]:
+ """Get step run metrics
+
+ Get the queue metrics for the tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_get_step_run_queue_metrics_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantStepRunQueueMetrics",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def tenant_get_step_run_queue_metrics_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get step run metrics
+
+ Get the queue metrics for the tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_get_step_run_queue_metrics_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantStepRunQueueMetrics",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _tenant_get_step_run_queue_metrics_serialize(
+ self,
+ tenant,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/step-run-queue-metrics",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
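+
+ # Usage sketch (illustrative only): fetching the step run queue metrics for a
+ # tenant, given a TenantApi instance `api` wired up as in the tenant_create
+ # sketch above. The tenant id is a placeholder UUID; @validate_call enforces
+ # the 36-character length client-side.
+ #
+ #     metrics = await api.tenant_get_step_run_queue_metrics(
+ #         tenant="00000000-0000-0000-0000-000000000000",
+ #     )
+ #     # `metrics` is a TenantStepRunQueueMetrics model on a 200 response; the
+ #     # *_with_http_info variant also exposes the status code and headers.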
+
+ @validate_call
+ async def tenant_invite_accept(
+ self,
+ accept_invite_request: Optional[AcceptInviteRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Accept tenant invite
+
+ Accepts a tenant invite
+
+ :param accept_invite_request:
+ :type accept_invite_request: AcceptInviteRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_accept_serialize(
+ accept_invite_request=accept_invite_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def tenant_invite_accept_with_http_info(
+ self,
+ accept_invite_request: Optional[AcceptInviteRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Accept tenant invite
+
+ Accepts a tenant invite
+
+ :param accept_invite_request:
+ :type accept_invite_request: AcceptInviteRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_accept_serialize(
+ accept_invite_request=accept_invite_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def tenant_invite_accept_without_preload_content(
+ self,
+ accept_invite_request: Optional[AcceptInviteRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Accept tenant invite
+
+ Accepts a tenant invite
+
+ :param accept_invite_request:
+ :type accept_invite_request: AcceptInviteRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_accept_serialize(
+ accept_invite_request=accept_invite_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _tenant_invite_accept_serialize(
+ self,
+ accept_invite_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if accept_invite_request is not None:
+ _body_params = accept_invite_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/users/invites/accept",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
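+
+ # Usage sketch (illustrative only): accepting a tenant invite as the invited
+ # user, given a TenantApi instance `api` as in the earlier sketch. The
+ # AcceptInviteRequest field name is an assumption for the example.
+ #
+ #     from hatchet_sdk.clients.rest.models import AcceptInviteRequest
+ #
+ #     await api.tenant_invite_accept(
+ #         accept_invite_request=AcceptInviteRequest(invite="<invite id>"),  # assumed field
+ #     )
+ #     # A 200 response has no body, so the call resolves to None.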
+
+ @validate_call
+ async def tenant_invite_create(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ create_tenant_invite_request: Annotated[
+ CreateTenantInviteRequest, Field(description="The tenant invite to create")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> TenantInvite:
+ """Create tenant invite
+
+ Creates a new tenant invite
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param create_tenant_invite_request: The tenant invite to create (required)
+ :type create_tenant_invite_request: CreateTenantInviteRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_create_serialize(
+ tenant=tenant,
+ create_tenant_invite_request=create_tenant_invite_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "201": "TenantInvite",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def tenant_invite_create_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ create_tenant_invite_request: Annotated[
+ CreateTenantInviteRequest, Field(description="The tenant invite to create")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[TenantInvite]:
+ """Create tenant invite
+
+ Creates a new tenant invite
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param create_tenant_invite_request: The tenant invite to create (required)
+ :type create_tenant_invite_request: CreateTenantInviteRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_create_serialize(
+ tenant=tenant,
+ create_tenant_invite_request=create_tenant_invite_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "201": "TenantInvite",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def tenant_invite_create_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ create_tenant_invite_request: Annotated[
+ CreateTenantInviteRequest, Field(description="The tenant invite to create")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Create tenant invite
+
+ Creates a new tenant invite
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param create_tenant_invite_request: The tenant invite to create (required)
+ :type create_tenant_invite_request: CreateTenantInviteRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_create_serialize(
+ tenant=tenant,
+ create_tenant_invite_request=create_tenant_invite_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "201": "TenantInvite",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _tenant_invite_create_serialize(
+ self,
+ tenant,
+ create_tenant_invite_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if create_tenant_invite_request is not None:
+ _body_params = create_tenant_invite_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/invites",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
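+
+ # Usage sketch (illustrative only): inviting a user to a tenant, given a
+ # TenantApi instance `api` as in the earlier sketch. The tenant id is a
+ # placeholder and the CreateTenantInviteRequest fields are assumptions.
+ #
+ #     from hatchet_sdk.clients.rest.models import CreateTenantInviteRequest
+ #
+ #     invite = await api.tenant_invite_create(
+ #         tenant="00000000-0000-0000-0000-000000000000",
+ #         create_tenant_invite_request=CreateTenantInviteRequest(
+ #             email="user@example.com",  # assumed field
+ #             role="MEMBER",             # assumed field and value
+ #         ),
+ #     )
+ #     # Returns a TenantInvite model on a 201 response.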
+
+ @validate_call
+ async def tenant_invite_list(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> TenantInviteList:
+ """List tenant invites
+
+ Gets a list of tenant invites
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantInviteList",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def tenant_invite_list_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[TenantInviteList]:
+ """List tenant invites
+
+ Gets a list of tenant invites
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantInviteList",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def tenant_invite_list_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List tenant invites
+
+ Gets a list of tenant invites
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantInviteList",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _tenant_invite_list_serialize(
+ self,
+ tenant,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/invites",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
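+
+ # Usage sketch (illustrative only): listing the pending invites for a tenant,
+ # given a TenantApi instance `api` as in the earlier sketch.
+ #
+ #     invites = await api.tenant_invite_list(
+ #         tenant="00000000-0000-0000-0000-000000000000",
+ #     )
+ #     for invite in invites.rows or []:  # `rows` field name is an assumption
+ #         print(invite)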
+
+ @validate_call
+ async def tenant_invite_reject(
+ self,
+ reject_invite_request: Optional[RejectInviteRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Reject tenant invite
+
+ Rejects a tenant invite
+
+ :param reject_invite_request:
+ :type reject_invite_request: RejectInviteRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_reject_serialize(
+ reject_invite_request=reject_invite_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def tenant_invite_reject_with_http_info(
+ self,
+ reject_invite_request: Optional[RejectInviteRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Reject tenant invite
+
+ Rejects a tenant invite
+
+ :param reject_invite_request:
+ :type reject_invite_request: RejectInviteRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_reject_serialize(
+ reject_invite_request=reject_invite_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def tenant_invite_reject_without_preload_content(
+ self,
+ reject_invite_request: Optional[RejectInviteRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Reject tenant invite
+
+ Rejects a tenant invite
+
+ :param reject_invite_request:
+ :type reject_invite_request: RejectInviteRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_invite_reject_serialize(
+ reject_invite_request=reject_invite_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": None,
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _tenant_invite_reject_serialize(
+ self,
+ reject_invite_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if reject_invite_request is not None:
+ _body_params = reject_invite_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/users/invites/reject",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
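+
+ # Usage sketch (illustrative only): rejecting a tenant invite as the invited
+ # user; mirrors the tenant_invite_accept sketch, and the RejectInviteRequest
+ # field name is likewise an assumption.
+ #
+ #     from hatchet_sdk.clients.rest.models import RejectInviteRequest
+ #
+ #     await api.tenant_invite_reject(
+ #         reject_invite_request=RejectInviteRequest(invite="<invite id>"),  # assumed field
+ #     )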
+
+ @validate_call
+ async def tenant_member_delete(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ member: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The tenant member id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> TenantMember:
+ """Delete a tenant member
+
+ Delete a member from a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param member: The tenant member id (required)
+ :type member: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_member_delete_serialize(
+ tenant=tenant,
+ member=member,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": "TenantMember",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def tenant_member_delete_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ member: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The tenant member id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[TenantMember]:
+ """Delete a tenant member
+
+ Delete a member from a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param member: The tenant member id (required)
+ :type member: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_member_delete_serialize(
+ tenant=tenant,
+ member=member,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": "TenantMember",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def tenant_member_delete_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ member: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The tenant member id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Delete a tenant member
+
+ Delete a member from a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param member: The tenant member id (required)
+ :type member: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_member_delete_serialize(
+ tenant=tenant,
+ member=member,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": "TenantMember",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _tenant_member_delete_serialize(
+ self,
+ tenant,
+ member,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if member is not None:
+ _path_params["member"] = member
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="DELETE",
+ resource_path="/api/v1/tenants/{tenant}/members/{member}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
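+
+ # Usage sketch (illustrative only): removing a member from a tenant, given a
+ # TenantApi instance `api` as in the earlier sketch. Both ids are placeholder
+ # UUIDs; @validate_call enforces their 36-character length client-side.
+ #
+ #     removed = await api.tenant_member_delete(
+ #         tenant="00000000-0000-0000-0000-000000000000",
+ #         member="11111111-1111-1111-1111-111111111111",
+ #     )
+ #     # Deserializes to a TenantMember model per the 204 entry in the
+ #     # response types map above.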
+
+ @validate_call
+ async def tenant_member_list(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> TenantMemberList:
+ """List tenant members
+
+ Gets a list of tenant members
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_member_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantMemberList",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def tenant_member_list_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[TenantMemberList]:
+ """List tenant members
+
+ Gets a list of tenant members
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_member_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantMemberList",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def tenant_member_list_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List tenant members
+
+ Gets a list of tenant members
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_member_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantMemberList",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _tenant_member_list_serialize(
+ self,
+ tenant,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/members",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
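+ # Usage sketch (illustrative, not part of the generated surface): each
+ # operation in this client is emitted in three flavours -- the plain
+ # coroutine returns the deserialized model, `*_with_http_info` returns the
+ # ApiResponse wrapper, and `*_without_preload_content` returns the raw,
+ # unread HTTP response. Calling the list operation above might look like
+ # the following; the enclosing class name `TenantApi` is assumed (its
+ # definition sits outside this hunk) and ApiClient.get_default() must
+ # already be configured with cookie or bearer auth.
+ #
+ #     from hatchet_sdk.clients.rest.api_client import ApiClient
+ #
+ #     api = TenantApi(ApiClient.get_default())
+ #     members = await api.tenant_member_list(
+ #         tenant="00000000-0000-0000-0000-000000000000",  # 36-char tenant id
+ #     )
+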
+ @validate_call
+ async def tenant_resource_policy_get(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> TenantResourcePolicy:
+ """Create tenant alert email group
+
+ Gets the resource policy for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_resource_policy_get_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantResourcePolicy",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def tenant_resource_policy_get_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[TenantResourcePolicy]:
+ """Create tenant alert email group
+
+ Gets the resource policy for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_resource_policy_get_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantResourcePolicy",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def tenant_resource_policy_get_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Create tenant alert email group
+
+ Gets the resource policy for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_resource_policy_get_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantResourcePolicy",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _tenant_resource_policy_get_serialize(
+ self,
+ tenant,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/resource-policy",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
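+ # Usage sketch (illustrative): fetching a tenant's resource policy. The
+ # enclosing class name `TenantApi` is assumed, and the call must run inside
+ # an async function against an ApiClient configured with cookie or bearer
+ # auth.
+ #
+ #     api = TenantApi(ApiClient.get_default())
+ #     policy = await api.tenant_resource_policy_get(tenant=tenant_id)
+ #     # On a 200 response `policy` is a TenantResourcePolicy model.
+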
+ @validate_call
+ async def tenant_update(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ update_tenant_request: Annotated[
+ UpdateTenantRequest, Field(description="The tenant properties to update")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> Tenant:
+ """Update tenant
+
+ Update an existing tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param update_tenant_request: The tenant properties to update (required)
+ :type update_tenant_request: UpdateTenantRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_update_serialize(
+ tenant=tenant,
+ update_tenant_request=update_tenant_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Tenant",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def tenant_update_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ update_tenant_request: Annotated[
+ UpdateTenantRequest, Field(description="The tenant properties to update")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[Tenant]:
+ """Update tenant
+
+ Update an existing tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param update_tenant_request: The tenant properties to update (required)
+ :type update_tenant_request: UpdateTenantRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_update_serialize(
+ tenant=tenant,
+ update_tenant_request=update_tenant_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Tenant",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def tenant_update_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ update_tenant_request: Annotated[
+ UpdateTenantRequest, Field(description="The tenant properties to update")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Update tenant
+
+ Update an existing tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param update_tenant_request: The tenant properties to update (required)
+ :type update_tenant_request: UpdateTenantRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_update_serialize(
+ tenant=tenant,
+ update_tenant_request=update_tenant_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Tenant",
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _tenant_update_serialize(
+ self,
+ tenant,
+ update_tenant_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if update_tenant_request is not None:
+ _body_params = update_tenant_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="PATCH",
+ resource_path="/api/v1/tenants/{tenant}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
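+ # Usage sketch (illustrative): updating a tenant. The import path follows
+ # the pattern used elsewhere in this package, the `name` field on
+ # UpdateTenantRequest is illustrative (check the generated model for the
+ # real attributes), and the enclosing class name `TenantApi` is assumed.
+ #
+ #     from hatchet_sdk.clients.rest.models.update_tenant_request import (
+ #         UpdateTenantRequest,
+ #     )
+ #
+ #     api = TenantApi(ApiClient.get_default())
+ #     updated = await api.tenant_update(
+ #         tenant=tenant_id,
+ #         update_tenant_request=UpdateTenantRequest(name="new-name"),
+ #     )
+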
+ @validate_call
+ async def user_list_tenant_invites(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> TenantInviteList:
+ """List tenant invites
+
+ Lists all tenant invites for the current user
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_list_tenant_invites_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantInviteList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def user_list_tenant_invites_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[TenantInviteList]:
+ """List tenant invites
+
+ Lists all tenant invites for the current user
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_list_tenant_invites_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantInviteList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def user_list_tenant_invites_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List tenant invites
+
+ Lists all tenant invites for the current user
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_list_tenant_invites_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantInviteList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _user_list_tenant_invites_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/users/invites",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
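+
+ # Usage sketch (illustrative): listing the current user's pending tenant
+ # invites. Note that this operation only declares cookieAuth (see
+ # _auth_settings above), so it targets browser-session clients; the
+ # enclosing class name `TenantApi` is assumed here as well.
+ #
+ #     api = TenantApi(ApiClient.get_default())
+ #     invites = await api.user_list_tenant_invites()
+ #     # On a 200 response `invites` is a TenantInviteList model.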
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/user_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/user_api.py
new file mode 100644
index 00000000..a9e7a35f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/user_api.py
@@ -0,0 +1,2888 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.models.user import User
+from hatchet_sdk.clients.rest.models.user_change_password_request import (
+ UserChangePasswordRequest,
+)
+from hatchet_sdk.clients.rest.models.user_login_request import UserLoginRequest
+from hatchet_sdk.clients.rest.models.user_register_request import UserRegisterRequest
+from hatchet_sdk.clients.rest.models.user_tenant_memberships_list import (
+ UserTenantMembershipsList,
+)
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class UserApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+ @validate_call
+ async def tenant_memberships_list(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> UserTenantMembershipsList:
+ """List tenant memberships
+
+ Lists all tenant memberships for the current user
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_memberships_list_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "UserTenantMembershipsList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def tenant_memberships_list_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[UserTenantMembershipsList]:
+ """List tenant memberships
+
+ Lists all tenant memberships for the current user
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_memberships_list_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "UserTenantMembershipsList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def tenant_memberships_list_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List tenant memberships
+
+ Lists all tenant memberships for the current user
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_memberships_list_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "UserTenantMembershipsList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _tenant_memberships_list_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/users/memberships",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
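+ # Usage sketch (illustrative): listing the current user's tenant
+ # memberships through the UserApi defined above. Assumes a cookie-
+ # authenticated ApiClient default has been configured elsewhere and that
+ # the call runs inside an async function.
+ #
+ #     from hatchet_sdk.clients.rest.api_client import ApiClient
+ #
+ #     api = UserApi(ApiClient.get_default())
+ #     memberships = await api.tenant_memberships_list()
+ #     # On a 200 response this is a UserTenantMembershipsList model.
+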
+ @validate_call
+ async def user_create(
+ self,
+ user_register_request: Optional[UserRegisterRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> User:
+ """Register user
+
+ Registers a user.
+
+ :param user_register_request:
+ :type user_register_request: UserRegisterRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_create_serialize(
+ user_register_request=user_register_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "User",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def user_create_with_http_info(
+ self,
+ user_register_request: Optional[UserRegisterRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[User]:
+ """Register user
+
+ Registers a user.
+
+ :param user_register_request:
+ :type user_register_request: UserRegisterRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_create_serialize(
+ user_register_request=user_register_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "User",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def user_create_without_preload_content(
+ self,
+ user_register_request: Optional[UserRegisterRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Register user
+
+ Registers a user.
+
+ :param user_register_request:
+ :type user_register_request: UserRegisterRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_create_serialize(
+ user_register_request=user_register_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "User",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _user_create_serialize(
+ self,
+ user_register_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if user_register_request is not None:
+ _body_params = user_register_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = []
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/users/register",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
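+ # Usage sketch (illustrative): registering a user. This operation declares
+ # no auth settings, so it can be called before a session exists; the
+ # UserRegisterRequest fields below are illustrative -- check the generated
+ # model imported at the top of this file for the real attributes.
+ #
+ #     api = UserApi(ApiClient.get_default())
+ #     user = await api.user_create(
+ #         user_register_request=UserRegisterRequest(
+ #             name="Example User",       # illustrative field
+ #             email="user@example.com",  # illustrative field
+ #             password="...",            # illustrative field
+ #         )
+ #     )
+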
+ @validate_call
+ async def user_get_current(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> User:
+ """Get current user
+
+ Gets the current user
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_get_current_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "User",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def user_get_current_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[User]:
+ """Get current user
+
+ Gets the current user
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_get_current_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "User",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def user_get_current_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get current user
+
+ Gets the current user
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_get_current_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "User",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _user_get_current_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/users/current",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
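+ # Usage sketch (illustrative): fetching the current user, plus the
+ # *_with_http_info variant for when the HTTP status and headers are also
+ # needed. Assumes a cookie-authenticated ApiClient default and an async
+ # calling context.
+ #
+ #     api = UserApi(ApiClient.get_default())
+ #     me = await api.user_get_current()
+ #     resp = await api.user_get_current_with_http_info()
+ #     # `resp` wraps the same User payload together with the raw HTTP
+ #     # metadata exposed by ApiResponse.
+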
+ @validate_call
+ async def user_update_github_oauth_callback(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Complete OAuth flow
+
+ Completes the OAuth flow
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_github_oauth_callback_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def user_update_github_oauth_callback_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Complete OAuth flow
+
+ Completes the OAuth flow
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_github_oauth_callback_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def user_update_github_oauth_callback_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Complete OAuth flow
+
+ Completes the OAuth flow
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_github_oauth_callback_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _user_update_github_oauth_callback_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # authentication setting
+ _auth_settings: List[str] = []
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/users/github/callback",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def user_update_github_oauth_start(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Start OAuth flow
+
+ Starts the OAuth flow
+
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_github_oauth_start_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def user_update_github_oauth_start_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Start OAuth flow
+
+ Starts the OAuth flow
+
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_github_oauth_start_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def user_update_github_oauth_start_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Start OAuth flow
+
+ Starts the OAuth flow
+
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_github_oauth_start_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _user_update_github_oauth_start_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # authentication setting
+ _auth_settings: List[str] = []
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/users/github/start",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
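+
+ # Usage sketch (illustrative only, not generated code): the three variants above
+ # follow the usual openapi-generator pattern. Assuming the enclosing class is named
+ # UserApi and `api_client` is an already-configured ApiClient instance:
+ #
+ #   api = UserApi(api_client)
+ #   # deserialized body only (None here, since the endpoint answers with a 302)
+ #   await api.user_update_github_oauth_start()
+ #   # full ApiResponse wrapper with status code, headers and data
+ #   info = await api.user_update_github_oauth_start_with_http_info()
+ #   # raw RESTResponse whose body is not preloaded
+ #   raw = await api.user_update_github_oauth_start_without_preload_content()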
+
+ @validate_call
+ async def user_update_google_oauth_callback(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Complete OAuth flow
+
+ Completes the OAuth flow
+
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_google_oauth_callback_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def user_update_google_oauth_callback_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Complete OAuth flow
+
+ Completes the OAuth flow
+
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_google_oauth_callback_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def user_update_google_oauth_callback_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Complete OAuth flow
+
+ Completes the OAuth flow
+
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_google_oauth_callback_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _user_update_google_oauth_callback_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # authentication setting
+ _auth_settings: List[str] = []
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/users/google/callback",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def user_update_google_oauth_start(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Start OAuth flow
+
+ Starts the OAuth flow
+
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_google_oauth_start_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def user_update_google_oauth_start_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Start OAuth flow
+
+ Starts the OAuth flow
+
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_google_oauth_start_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def user_update_google_oauth_start_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Start OAuth flow
+
+ Starts the OAuth flow
+
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_google_oauth_start_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _user_update_google_oauth_start_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # authentication setting
+ _auth_settings: List[str] = []
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/users/google/start",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def user_update_login(
+ self,
+ user_login_request: Optional[UserLoginRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> User:
+ """Login user
+
+ Logs in a user.
+
+ :param user_login_request:
+ :type user_login_request: UserLoginRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_login_serialize(
+ user_login_request=user_login_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "User",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def user_update_login_with_http_info(
+ self,
+ user_login_request: Optional[UserLoginRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[User]:
+ """Login user
+
+ Logs in a user.
+
+ :param user_login_request:
+ :type user_login_request: UserLoginRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_login_serialize(
+ user_login_request=user_login_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "User",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def user_update_login_without_preload_content(
+ self,
+ user_login_request: Optional[UserLoginRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Login user
+
+ Logs in a user.
+
+ :param user_login_request:
+ :type user_login_request: UserLoginRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_login_serialize(
+ user_login_request=user_login_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "User",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _user_update_login_serialize(
+ self,
+ user_login_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if user_login_request is not None:
+ _body_params = user_login_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = []
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/users/login",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
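+
+ # Usage sketch (illustrative only): calling the login endpoint defined above. The
+ # enclosing class name (UserApi), the `api_client` variable and the UserLoginRequest
+ # field names are assumptions made for the example, not guaranteed by this file.
+ #
+ #   from hatchet_sdk.clients.rest.models import UserLoginRequest  # import path assumed
+ #
+ #   api = UserApi(api_client)
+ #   user = await api.user_update_login(
+ #       user_login_request=UserLoginRequest(email="me@example.com", password="...")
+ #   )
+ #   # a 200 response deserializes to a User model; 400/401/405 map to APIErrors
+ #   # per the response_types_map above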
+
+ @validate_call
+ async def user_update_logout(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> User:
+ """Logout user
+
+ Logs out a user.
+
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_logout_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "User",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def user_update_logout_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[User]:
+ """Logout user
+
+ Logs out a user.
+
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_logout_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "User",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def user_update_logout_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Logout user
+
+ Logs out a user.
+
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_logout_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "User",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _user_update_logout_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/users/logout",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
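+
+ # Note (illustrative): unlike the login serializer above, which declares no auth
+ # settings, this logout endpoint lists "cookieAuth", so the session cookie held by
+ # the ApiClient (or a per-call _request_auth override) is attached to the request.
+ #
+ #   user = await api.user_update_logout()  # `api` as in the earlier sketches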
+
+ @validate_call
+ async def user_update_password(
+ self,
+ user_change_password_request: Optional[UserChangePasswordRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> User:
+ """Change user password
+
+ Updates a user's password.
+
+ :param user_change_password_request:
+ :type user_change_password_request: UserChangePasswordRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_password_serialize(
+ user_change_password_request=user_change_password_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "User",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def user_update_password_with_http_info(
+ self,
+ user_change_password_request: Optional[UserChangePasswordRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[User]:
+ """Change user password
+
+ Updates a user's password.
+
+ :param user_change_password_request:
+ :type user_change_password_request: UserChangePasswordRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_password_serialize(
+ user_change_password_request=user_change_password_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "User",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def user_update_password_without_preload_content(
+ self,
+ user_change_password_request: Optional[UserChangePasswordRequest] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Change user password
+
+ Updates a user's password.
+
+ :param user_change_password_request:
+ :type user_change_password_request: UserChangePasswordRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_password_serialize(
+ user_change_password_request=user_change_password_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "User",
+ "400": "APIErrors",
+ "401": "APIErrors",
+ "405": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _user_update_password_serialize(
+ self,
+ user_change_password_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if user_change_password_request is not None:
+ _body_params = user_change_password_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/users/password",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
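+
+ # Usage sketch (illustrative only): changing the current user's password. The
+ # endpoint also requires cookieAuth, and the UserChangePasswordRequest field names
+ # below are assumptions for the example.
+ #
+ #   updated = await api.user_update_password(
+ #       user_change_password_request=UserChangePasswordRequest(
+ #           password="current-password", new_password="new-password"
+ #       )
+ #   )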
+
+ @validate_call
+ async def user_update_slack_oauth_callback(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Complete OAuth flow
+
+ Completes the OAuth flow
+
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_slack_oauth_callback_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def user_update_slack_oauth_callback_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Complete OAuth flow
+
+ Completes the OAuth flow
+
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_slack_oauth_callback_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def user_update_slack_oauth_callback_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Complete OAuth flow
+
+ Completes the OAuth flow
+
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_slack_oauth_callback_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _user_update_slack_oauth_callback_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/users/slack/callback",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def user_update_slack_oauth_start(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Start OAuth flow
+
+ Starts the OAuth flow
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_slack_oauth_start_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def user_update_slack_oauth_start_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Start OAuth flow
+
+ Starts the OAuth flow
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_slack_oauth_start_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def user_update_slack_oauth_start_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Start OAuth flow
+
+ Starts the OAuth flow
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._user_update_slack_oauth_start_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "302": None,
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _user_update_slack_oauth_start_serialize(
+ self,
+ tenant,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/slack/start",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
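The generated client emits each endpoint in three flavors: the plain coroutine reads the body and returns the deserialized `.data`, the `_with_http_info` variant returns the full `ApiResponse` wrapper, and `_without_preload_content` skips body handling and returns the raw REST response. Below is a minimal sketch of the OAuth-start call, assuming an already-configured instance of the enclosing API class (its constructor sits outside this hunk) is passed in as `slack_api`, and that `ApiResponse` exposes `status_code` as in the standard generated `api_response` module:

    from hatchet_sdk.clients.rest.api_response import ApiResponse


    async def start_slack_oauth(slack_api, tenant_id: str) -> int:
        # Plain variant: the 302 redirect maps to no model, so the return value is None.
        await slack_api.user_update_slack_oauth_start(tenant=tenant_id)

        # _with_http_info variant: wraps the same call in an ApiResponse so the
        # redirect's status code (and headers) stay visible to the caller.
        resp: ApiResponse[None] = await slack_api.user_update_slack_oauth_start_with_http_info(
            tenant=tenant_id
        )
        return resp.status_code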
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/worker_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/worker_api.py
new file mode 100644
index 00000000..f1be3e82
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/worker_api.py
@@ -0,0 +1,858 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.models.update_worker_request import UpdateWorkerRequest
+from hatchet_sdk.clients.rest.models.worker import Worker
+from hatchet_sdk.clients.rest.models.worker_list import WorkerList
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class WorkerApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+ @validate_call
+ async def worker_get(
+ self,
+ worker: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The worker id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> Worker:
+ """Get worker
+
+ Get a worker
+
+ :param worker: The worker id (required)
+ :type worker: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._worker_get_serialize(
+ worker=worker,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Worker",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def worker_get_with_http_info(
+ self,
+ worker: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The worker id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[Worker]:
+ """Get worker
+
+ Get a worker
+
+ :param worker: The worker id (required)
+ :type worker: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._worker_get_serialize(
+ worker=worker,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Worker",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def worker_get_without_preload_content(
+ self,
+ worker: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The worker id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get worker
+
+ Get a worker
+
+ :param worker: The worker id (required)
+ :type worker: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._worker_get_serialize(
+ worker=worker,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Worker",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _worker_get_serialize(
+ self,
+ worker,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if worker is not None:
+ _path_params["worker"] = worker
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/workers/{worker}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def worker_list(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> WorkerList:
+ """Get workers
+
+ Get all workers for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._worker_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkerList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def worker_list_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[WorkerList]:
+ """Get workers
+
+ Get all workers for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._worker_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkerList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def worker_list_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get workers
+
+ Get all workers for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._worker_list_serialize(
+ tenant=tenant,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkerList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _worker_list_serialize(
+ self,
+ tenant,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/worker",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
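A usage sketch for the two read-only `WorkerApi` calls above. It assumes the generated `Configuration` class in `hatchet_sdk.clients.rest.configuration` accepts `host` and `access_token` keyword arguments, as the standard OpenAPI Generator layout does; the host, token, and 36-character ids are placeholders:

    import asyncio

    from hatchet_sdk.clients.rest.api.worker_api import WorkerApi
    from hatchet_sdk.clients.rest.api_client import ApiClient
    from hatchet_sdk.clients.rest.configuration import Configuration


    async def main() -> None:
        # Placeholder host and token; both ids below must be exactly 36 characters
        # to satisfy the Field(min_length=36, max_length=36) validation.
        config = Configuration(host="https://hatchet.example.com", access_token="<API_TOKEN>")
        workers = WorkerApi(ApiClient(config))

        tenant_id = "00000000-0000-0000-0000-000000000000"
        worker_id = "00000000-0000-0000-0000-000000000000"

        # GET /api/v1/tenants/{tenant}/worker -> WorkerList
        print(await workers.worker_list(tenant=tenant_id))

        # GET /api/v1/workers/{worker} -> Worker
        print(await workers.worker_get(worker=worker_id))


    asyncio.run(main())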
+
+ @validate_call
+ async def worker_update(
+ self,
+ worker: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The worker id"
+ ),
+ ],
+ update_worker_request: Annotated[
+ UpdateWorkerRequest, Field(description="The worker update")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> Worker:
+ """Update worker
+
+ Update a worker
+
+ :param worker: The worker id (required)
+ :type worker: str
+ :param update_worker_request: The worker update (required)
+ :type update_worker_request: UpdateWorkerRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._worker_update_serialize(
+ worker=worker,
+ update_worker_request=update_worker_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Worker",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def worker_update_with_http_info(
+ self,
+ worker: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The worker id"
+ ),
+ ],
+ update_worker_request: Annotated[
+ UpdateWorkerRequest, Field(description="The worker update")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[Worker]:
+ """Update worker
+
+ Update a worker
+
+ :param worker: The worker id (required)
+ :type worker: str
+ :param update_worker_request: The worker update (required)
+ :type update_worker_request: UpdateWorkerRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._worker_update_serialize(
+ worker=worker,
+ update_worker_request=update_worker_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Worker",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def worker_update_without_preload_content(
+ self,
+ worker: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The worker id"
+ ),
+ ],
+ update_worker_request: Annotated[
+ UpdateWorkerRequest, Field(description="The worker update")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Update worker
+
+ Update a worker
+
+ :param worker: The worker id (required)
+ :type worker: str
+ :param update_worker_request: The worker update (required)
+ :type update_worker_request: UpdateWorkerRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._worker_update_serialize(
+ worker=worker,
+ update_worker_request=update_worker_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Worker",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _worker_update_serialize(
+ self,
+ worker,
+ update_worker_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if worker is not None:
+ _path_params["worker"] = worker
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if update_worker_request is not None:
+ _body_params = update_worker_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="PATCH",
+ resource_path="/api/v1/workers/{worker}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
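`worker_update` is the only method in this file that sends a body: the `UpdateWorkerRequest` model is serialized as JSON and PATCHed to `/api/v1/workers/{worker}`. A sketch, assuming the default `ApiClient` has already been configured elsewhere and that all fields on `UpdateWorkerRequest` are optional (the concrete field names live in the generated model, not in this hunk):

    import asyncio

    from hatchet_sdk.clients.rest.api.worker_api import WorkerApi
    from hatchet_sdk.clients.rest.api_client import ApiClient
    from hatchet_sdk.clients.rest.models.update_worker_request import UpdateWorkerRequest


    async def update_worker(worker_id: str) -> None:
        # WorkerApi() would fall back to ApiClient.get_default() on its own; passing
        # it explicitly just makes the dependency visible.
        workers = WorkerApi(ApiClient.get_default())

        # The PATCH body; populate fields (for example, a pause flag) per the
        # generated UpdateWorkerRequest model. An empty request is a stand-in here.
        body = UpdateWorkerRequest()
        print(await workers.worker_update(worker=worker_id, update_worker_request=body))


    asyncio.run(update_worker("00000000-0000-0000-0000-000000000000"))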
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/workflow_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/workflow_api.py
new file mode 100644
index 00000000..ce4094aa
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/workflow_api.py
@@ -0,0 +1,6310 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from datetime import datetime
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.models.cron_workflows import CronWorkflows
+from hatchet_sdk.clients.rest.models.cron_workflows_list import CronWorkflowsList
+from hatchet_sdk.clients.rest.models.cron_workflows_order_by_field import (
+ CronWorkflowsOrderByField,
+)
+from hatchet_sdk.clients.rest.models.scheduled_run_status import ScheduledRunStatus
+from hatchet_sdk.clients.rest.models.scheduled_workflows import ScheduledWorkflows
+from hatchet_sdk.clients.rest.models.scheduled_workflows_list import (
+ ScheduledWorkflowsList,
+)
+from hatchet_sdk.clients.rest.models.scheduled_workflows_order_by_field import (
+ ScheduledWorkflowsOrderByField,
+)
+from hatchet_sdk.clients.rest.models.tenant_queue_metrics import TenantQueueMetrics
+from hatchet_sdk.clients.rest.models.workflow import Workflow
+from hatchet_sdk.clients.rest.models.workflow_kind import WorkflowKind
+from hatchet_sdk.clients.rest.models.workflow_list import WorkflowList
+from hatchet_sdk.clients.rest.models.workflow_metrics import WorkflowMetrics
+from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun
+from hatchet_sdk.clients.rest.models.workflow_run_list import WorkflowRunList
+from hatchet_sdk.clients.rest.models.workflow_run_order_by_direction import (
+ WorkflowRunOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.workflow_run_order_by_field import (
+ WorkflowRunOrderByField,
+)
+from hatchet_sdk.clients.rest.models.workflow_run_shape import WorkflowRunShape
+from hatchet_sdk.clients.rest.models.workflow_run_status import WorkflowRunStatus
+from hatchet_sdk.clients.rest.models.workflow_runs_metrics import WorkflowRunsMetrics
+from hatchet_sdk.clients.rest.models.workflow_update_request import (
+ WorkflowUpdateRequest,
+)
+from hatchet_sdk.clients.rest.models.workflow_version import WorkflowVersion
+from hatchet_sdk.clients.rest.models.workflow_workers_count import WorkflowWorkersCount
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class WorkflowApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+ @validate_call
+ async def cron_workflow_list(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ workflow_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The workflow id to get runs for."),
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ order_by_field: Annotated[
+ Optional[CronWorkflowsOrderByField], Field(description="The order by field")
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[WorkflowRunOrderByDirection],
+ Field(description="The order by direction"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> CronWorkflowsList:
+ """Get cron job workflows
+
+ Get all cron job workflow triggers for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param workflow_id: The workflow id to get runs for.
+ :type workflow_id: str
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param order_by_field: The order by field
+ :type order_by_field: CronWorkflowsOrderByField
+ :param order_by_direction: The order by direction
+ :type order_by_direction: WorkflowRunOrderByDirection
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._cron_workflow_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ workflow_id=workflow_id,
+ additional_metadata=additional_metadata,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CronWorkflowsList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def cron_workflow_list_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ workflow_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The workflow id to get runs for."),
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ order_by_field: Annotated[
+ Optional[CronWorkflowsOrderByField], Field(description="The order by field")
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[WorkflowRunOrderByDirection],
+ Field(description="The order by direction"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[CronWorkflowsList]:
+ """Get cron job workflows
+
+ Get all cron job workflow triggers for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param workflow_id: The workflow id to get runs for.
+ :type workflow_id: str
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param order_by_field: The order by field
+ :type order_by_field: CronWorkflowsOrderByField
+ :param order_by_direction: The order by direction
+ :type order_by_direction: WorkflowRunOrderByDirection
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._cron_workflow_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ workflow_id=workflow_id,
+ additional_metadata=additional_metadata,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CronWorkflowsList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def cron_workflow_list_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ workflow_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The workflow id to get runs for."),
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ order_by_field: Annotated[
+ Optional[CronWorkflowsOrderByField], Field(description="The order by field")
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[WorkflowRunOrderByDirection],
+ Field(description="The order by direction"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get cron job workflows
+
+ Get all cron job workflow triggers for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param workflow_id: The workflow id to get runs for.
+ :type workflow_id: str
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param order_by_field: The order by field
+ :type order_by_field: CronWorkflowsOrderByField
+ :param order_by_direction: The order by direction
+ :type order_by_direction: WorkflowRunOrderByDirection
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._cron_workflow_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ workflow_id=workflow_id,
+ additional_metadata=additional_metadata,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CronWorkflowsList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _cron_workflow_list_serialize(
+ self,
+ tenant,
+ offset,
+ limit,
+ workflow_id,
+ additional_metadata,
+ order_by_field,
+ order_by_direction,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ "additionalMetadata": "multi",
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ if offset is not None:
+
+ _query_params.append(("offset", offset))
+
+ if limit is not None:
+
+ _query_params.append(("limit", limit))
+
+ if workflow_id is not None:
+
+ _query_params.append(("workflowId", workflow_id))
+
+ if additional_metadata is not None:
+
+ _query_params.append(("additionalMetadata", additional_metadata))
+
+ if order_by_field is not None:
+
+ _query_params.append(("orderByField", order_by_field.value))
+
+ if order_by_direction is not None:
+
+ _query_params.append(("orderByDirection", order_by_direction.value))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/workflows/crons",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
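`cron_workflow_list` is the first method in `WorkflowApi` that builds query parameters: offset, limit, workflowId, and the order-by enums are appended to the query string (enums via their `.value`), and `additionalMetadata` uses the "multi" collection format, so one query pair is emitted per list entry. A paging sketch, assuming the default `ApiClient` is already configured:

    import asyncio

    from hatchet_sdk.clients.rest.api.workflow_api import WorkflowApi
    from hatchet_sdk.clients.rest.api_client import ApiClient


    async def list_cron_triggers(tenant_id: str) -> None:
        workflows = WorkflowApi(ApiClient.get_default())

        # offset/limit become ?offset=0&limit=50 on
        # GET /api/v1/tenants/{tenant}/workflows/crons.
        crons = await workflows.cron_workflow_list(tenant=tenant_id, offset=0, limit=50)
        print(crons)


    asyncio.run(list_cron_triggers("00000000-0000-0000-0000-000000000000"))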
+
+ @validate_call
+ async def tenant_get_queue_metrics(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflows: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of workflow IDs to filter by"),
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> TenantQueueMetrics:
+ """Get workflow metrics
+
+ Get the queue metrics for the tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflows: A list of workflow IDs to filter by
+ :type workflows: List[str]
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_get_queue_metrics_serialize(
+ tenant=tenant,
+ workflows=workflows,
+ additional_metadata=additional_metadata,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantQueueMetrics",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def tenant_get_queue_metrics_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflows: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of workflow IDs to filter by"),
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[TenantQueueMetrics]:
+ """Get workflow metrics
+
+ Get the queue metrics for the tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflows: A list of workflow IDs to filter by
+ :type workflows: List[str]
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_get_queue_metrics_serialize(
+ tenant=tenant,
+ workflows=workflows,
+ additional_metadata=additional_metadata,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantQueueMetrics",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def tenant_get_queue_metrics_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflows: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of workflow IDs to filter by"),
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get workflow metrics
+
+ Get the queue metrics for the tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflows: A list of workflow IDs to filter by
+ :type workflows: List[str]
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._tenant_get_queue_metrics_serialize(
+ tenant=tenant,
+ workflows=workflows,
+ additional_metadata=additional_metadata,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "TenantQueueMetrics",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _tenant_get_queue_metrics_serialize(
+ self,
+ tenant,
+ workflows,
+ additional_metadata,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ "workflows": "multi",
+ "additionalMetadata": "multi",
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ if workflows is not None:
+
+ _query_params.append(("workflows", workflows))
+
+ if additional_metadata is not None:
+
+ _query_params.append(("additionalMetadata", additional_metadata))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/queue-metrics",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
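+
+ # Usage sketch (editorial illustration, not part of the generated client). It
+ # assumes the standard openapi-generator layout for this package: a
+ # `Configuration`/`ApiClient` pair exported from hatchet_sdk.clients.rest, a
+ # `WorkflowApi` class in this module, and a base `tenant_get_queue_metrics`
+ # coroutine generated alongside the variants above. Host and token are
+ # placeholders.
+ #
+ #     from hatchet_sdk.clients.rest import ApiClient, Configuration
+ #     from hatchet_sdk.clients.rest.api.workflow_api import WorkflowApi
+ #
+ #     config = Configuration(host="https://<your-hatchet-host>", access_token="<api token>")
+ #     async with ApiClient(config) as api_client:
+ #         api = WorkflowApi(api_client)
+ #         metrics = await api.tenant_get_queue_metrics(
+ #             tenant="<tenant uuid>",
+ #             workflows=["<workflow uuid>"],  # optional filter
+ #         )
+ #
+ # The `_without_preload_content` variant above returns the raw REST response;
+ # the base coroutine deserializes the 200 body into TenantQueueMetrics.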
+
+ @validate_call
+ async def workflow_cron_delete(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ cron_workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The cron job id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Delete cron job workflow run
+
+ Delete a cron job workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param cron_workflow: The cron job id (required)
+ :type cron_workflow: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_cron_delete_serialize(
+ tenant=tenant,
+ cron_workflow=cron_workflow,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_cron_delete_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ cron_workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The cron job id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Delete cron job workflow run
+
+ Delete a cron job workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param cron_workflow: The cron job id (required)
+ :type cron_workflow: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_cron_delete_serialize(
+ tenant=tenant,
+ cron_workflow=cron_workflow,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_cron_delete_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ cron_workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The cron job id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Delete cron job workflow run
+
+ Delete a cron job workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param cron_workflow: The cron job id (required)
+ :type cron_workflow: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_cron_delete_serialize(
+ tenant=tenant,
+ cron_workflow=cron_workflow,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_cron_delete_serialize(
+ self,
+ tenant,
+ cron_workflow,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if cron_workflow is not None:
+ _path_params["cron-workflow"] = cron_workflow
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="DELETE",
+ resource_path="/api/v1/tenants/{tenant}/workflows/crons/{cron-workflow}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
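+
+ # Usage sketch (illustrative, not generated): with a `WorkflowApi` instance
+ # `api` built as in the earlier sketch, a cron workflow run is deleted by
+ # tenant id and cron job id.
+ #
+ #     await api.workflow_cron_delete(
+ #         tenant="<tenant uuid>",
+ #         cron_workflow="<cron job uuid>",
+ #     )
+ #
+ # A successful call corresponds to the 204 response and returns None; error
+ # statuses are deserialized according to the response-types map above.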
+
+ @validate_call
+ async def workflow_cron_get(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ cron_workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The cron job id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> CronWorkflows:
+ """Get cron job workflow run
+
+ Get a cron job workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param cron_workflow: The cron job id (required)
+ :type cron_workflow: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_cron_get_serialize(
+ tenant=tenant,
+ cron_workflow=cron_workflow,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CronWorkflows",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_cron_get_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ cron_workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The cron job id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[CronWorkflows]:
+ """Get cron job workflow run
+
+ Get a cron job workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param cron_workflow: The cron job id (required)
+ :type cron_workflow: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_cron_get_serialize(
+ tenant=tenant,
+ cron_workflow=cron_workflow,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CronWorkflows",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_cron_get_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ cron_workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The cron job id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get cron job workflow run
+
+ Get a cron job workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param cron_workflow: The cron job id (required)
+ :type cron_workflow: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_cron_get_serialize(
+ tenant=tenant,
+ cron_workflow=cron_workflow,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CronWorkflows",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_cron_get_serialize(
+ self,
+ tenant,
+ cron_workflow,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if cron_workflow is not None:
+ _path_params["cron-workflow"] = cron_workflow
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/workflows/crons/{cron-workflow}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
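+
+ # Usage sketch (illustrative, not generated): fetch a single cron workflow run
+ # definition; the 200 response deserializes into CronWorkflows.
+ #
+ #     cron = await api.workflow_cron_get(
+ #         tenant="<tenant uuid>",
+ #         cron_workflow="<cron job uuid>",
+ #     )
+ #
+ # Use workflow_cron_get_with_http_info when the status code and headers are
+ # needed alongside the deserialized body.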
+
+ @validate_call
+ async def workflow_delete(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Delete workflow
+
+ Delete a workflow for a tenant
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_delete_serialize(
+ workflow=workflow,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_delete_with_http_info(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Delete workflow
+
+ Delete a workflow for a tenant
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_delete_serialize(
+ workflow=workflow,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_delete_without_preload_content(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Delete workflow
+
+ Delete a workflow for a tenant
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_delete_serialize(
+ workflow=workflow,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_delete_serialize(
+ self,
+ workflow,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if workflow is not None:
+ _path_params["workflow"] = workflow
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="DELETE",
+ resource_path="/api/v1/workflows/{workflow}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
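+
+ # Usage sketch (illustrative, not generated): deletion is addressed by the
+ # workflow id alone; no tenant path parameter is required for this endpoint.
+ #
+ #     await api.workflow_delete(workflow="<workflow uuid>")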
+
+ @validate_call
+ async def workflow_get(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> Workflow:
+ """Get workflow
+
+ Get a workflow for a tenant
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_get_serialize(
+ workflow=workflow,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Workflow",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_get_with_http_info(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[Workflow]:
+ """Get workflow
+
+ Get a workflow for a tenant
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_get_serialize(
+ workflow=workflow,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Workflow",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_get_without_preload_content(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get workflow
+
+ Get a workflow for a tenant
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_get_serialize(
+ workflow=workflow,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Workflow",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_get_serialize(
+ self,
+ workflow,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if workflow is not None:
+ _path_params["workflow"] = workflow
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/workflows/{workflow}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
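+
+ # Usage sketch (illustrative, not generated): retrieve a single Workflow model
+ # by id; 400/403/404 responses deserialize into APIErrors.
+ #
+ #     wf = await api.workflow_get(workflow="<workflow uuid>")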
+
+ @validate_call
+ async def workflow_get_metrics(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ status: Annotated[
+ Optional[WorkflowRunStatus],
+ Field(description="A status of workflow run statuses to filter by"),
+ ] = None,
+ group_key: Annotated[
+ Optional[StrictStr], Field(description="A group key to filter metrics by")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> WorkflowMetrics:
+ """Get workflow metrics
+
+ Get the metrics for a workflow version
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param status: A status of workflow run statuses to filter by
+ :type status: WorkflowRunStatus
+ :param group_key: A group key to filter metrics by
+ :type group_key: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_get_metrics_serialize(
+ workflow=workflow,
+ status=status,
+ group_key=group_key,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowMetrics",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_get_metrics_with_http_info(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ status: Annotated[
+ Optional[WorkflowRunStatus],
+ Field(description="A status of workflow run statuses to filter by"),
+ ] = None,
+ group_key: Annotated[
+ Optional[StrictStr], Field(description="A group key to filter metrics by")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[WorkflowMetrics]:
+ """Get workflow metrics
+
+ Get the metrics for a workflow version
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param status: A status of workflow run statuses to filter by
+ :type status: WorkflowRunStatus
+ :param group_key: A group key to filter metrics by
+ :type group_key: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_get_metrics_serialize(
+ workflow=workflow,
+ status=status,
+ group_key=group_key,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowMetrics",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_get_metrics_without_preload_content(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ status: Annotated[
+ Optional[WorkflowRunStatus],
+ Field(description="A status of workflow run statuses to filter by"),
+ ] = None,
+ group_key: Annotated[
+ Optional[StrictStr], Field(description="A group key to filter metrics by")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get workflow metrics
+
+ Get the metrics for a workflow version
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param status: A status of workflow run statuses to filter by
+ :type status: WorkflowRunStatus
+ :param group_key: A group key to filter metrics by
+ :type group_key: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_get_metrics_serialize(
+ workflow=workflow,
+ status=status,
+ group_key=group_key,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowMetrics",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_get_metrics_serialize(
+ self,
+ workflow,
+ status,
+ group_key,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if workflow is not None:
+ _path_params["workflow"] = workflow
+ # process the query parameters
+ if status is not None:
+
+ _query_params.append(("status", status.value))
+
+ if group_key is not None:
+
+ _query_params.append(("groupKey", group_key))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/workflows/{workflow}/metrics",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
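+
+ # Usage sketch (illustrative, not generated): both filters are optional; the
+ # serializer above sends `status` as its enum `.value` and `group_key` as the
+ # `groupKey` query parameter.
+ #
+ #     metrics = await api.workflow_get_metrics(
+ #         workflow="<workflow uuid>",
+ #         status=WorkflowRunStatus.FAILED,  # assumed member name of the enum
+ #         group_key="customer-123",
+ #     )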
+
+ @validate_call
+ async def workflow_get_workers_count(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> WorkflowWorkersCount:
+ """Get workflow worker count
+
+ Get a count of the workers available for a workflow
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_get_workers_count_serialize(
+ tenant=tenant,
+ workflow=workflow,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowWorkersCount",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_get_workers_count_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[WorkflowWorkersCount]:
+ """Get workflow worker count
+
+ Get a count of the workers available for a workflow
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_get_workers_count_serialize(
+ tenant=tenant,
+ workflow=workflow,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowWorkersCount",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_get_workers_count_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get workflow worker count
+
+ Get a count of the workers available for a workflow
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_get_workers_count_serialize(
+ tenant=tenant,
+ workflow=workflow,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowWorkersCount",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_get_workers_count_serialize(
+ self,
+ tenant,
+ workflow,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if workflow is not None:
+ _path_params["workflow"] = workflow
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/workflows/{workflow}/worker-count",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
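+
+ # Usage sketch (illustrative, not generated): the worker count is scoped to
+ # both a tenant and a workflow, matching the two path parameters above.
+ #
+ #     count = await api.workflow_get_workers_count(
+ #         tenant="<tenant uuid>",
+ #         workflow="<workflow uuid>",
+ #     )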
+
+ @validate_call
+ async def workflow_list(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ name: Annotated[
+ Optional[StrictStr], Field(description="Search by name")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> WorkflowList:
+ """Get workflows
+
+ Get all workflows for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param name: Search by name
+ :type name: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total
+ request timeout. It can also be a pair (tuple)
+ of (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ name=name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
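+
+ # Usage sketch (illustrative, not generated): listing accepts optional
+ # offset/limit paging and a name filter in addition to the tenant id.
+ #
+ #     page = await api.workflow_list(
+ #         tenant="<tenant uuid>",
+ #         limit=50,
+ #         offset=0,
+ #         name="my-workflow",
+ #     )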
+
+ @validate_call
+ async def workflow_list_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ name: Annotated[
+ Optional[StrictStr], Field(description="Search by name")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[WorkflowList]:
+ """Get workflows
+
+ Get all workflows for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param name: Search by name
+ :type name: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ name=name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_list_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ name: Annotated[
+ Optional[StrictStr], Field(description="Search by name")
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get workflows
+
+ Get all workflows for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param name: Search by name
+ :type name: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ name=name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_list_serialize(
+ self,
+ tenant,
+ offset,
+ limit,
+ name,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ if offset is not None:
+ _query_params.append(("offset", offset))
+
+ if limit is not None:
+ _query_params.append(("limit", limit))
+
+ if name is not None:
+ _query_params.append(("name", name))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/workflows",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def workflow_run_get(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The workflow run id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> WorkflowRun:
+ """Get workflow run
+
+ Get a workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow_run: The workflow run id (required)
+ :type workflow_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_get_serialize(
+ tenant=tenant,
+ workflow_run=workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowRun",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
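+ # Usage sketch (editorial, illustrative only): fetch one run by tenant and run
+ # id; both UUIDs are placeholders and the `status` attribute on WorkflowRun is
+ # an assumption.
+ #
+ #     run = await api.workflow_run_get(
+ #         tenant="00000000-0000-0000-0000-000000000000",
+ #         workflow_run="11111111-1111-1111-1111-111111111111",
+ #     )
+ #     print(run.status)
+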
+ @validate_call
+ async def workflow_run_get_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The workflow run id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[WorkflowRun]:
+ """Get workflow run
+
+ Get a workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow_run: The workflow run id (required)
+ :type workflow_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_get_serialize(
+ tenant=tenant,
+ workflow_run=workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowRun",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_run_get_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The workflow run id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get workflow run
+
+ Get a workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow_run: The workflow run id (required)
+ :type workflow_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_get_serialize(
+ tenant=tenant,
+ workflow_run=workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowRun",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_run_get_serialize(
+ self,
+ tenant,
+ workflow_run,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if workflow_run is not None:
+ _path_params["workflow-run"] = workflow_run
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/workflow-runs/{workflow-run}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def workflow_run_get_metrics(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ event_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The event id to get runs for."),
+ ] = None,
+ workflow_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The workflow id to get runs for."),
+ ] = None,
+ parent_workflow_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent workflow run id"),
+ ] = None,
+ parent_step_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent step run id"),
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ created_after: Annotated[
+ Optional[datetime],
+ Field(description="The time after the workflow run was created"),
+ ] = None,
+ created_before: Annotated[
+ Optional[datetime],
+ Field(description="The time before the workflow run was created"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> WorkflowRunsMetrics:
+ """Get workflow runs metrics
+
+ Get a summary of workflow run metrics for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param event_id: The event id to get runs for.
+ :type event_id: str
+ :param workflow_id: The workflow id to get runs for.
+ :type workflow_id: str
+ :param parent_workflow_run_id: The parent workflow run id
+ :type parent_workflow_run_id: str
+ :param parent_step_run_id: The parent step run id
+ :type parent_step_run_id: str
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param created_after: The time after the workflow run was created
+ :type created_after: datetime
+ :param created_before: The time before the workflow run was created
+ :type created_before: datetime
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_get_metrics_serialize(
+ tenant=tenant,
+ event_id=event_id,
+ workflow_id=workflow_id,
+ parent_workflow_run_id=parent_workflow_run_id,
+ parent_step_run_id=parent_step_run_id,
+ additional_metadata=additional_metadata,
+ created_after=created_after,
+ created_before=created_before,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowRunsMetrics",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
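+ # Usage sketch (editorial, illustrative only): summarize run metrics for a
+ # single workflow over the last day; the workflow id is a placeholder.
+ #
+ #     from datetime import datetime, timedelta, timezone
+ #
+ #     metrics = await api.workflow_run_get_metrics(
+ #         tenant="00000000-0000-0000-0000-000000000000",
+ #         workflow_id="22222222-2222-2222-2222-222222222222",
+ #         created_after=datetime.now(timezone.utc) - timedelta(days=1),
+ #     )
+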
+ @validate_call
+ async def workflow_run_get_metrics_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ event_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The event id to get runs for."),
+ ] = None,
+ workflow_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The workflow id to get runs for."),
+ ] = None,
+ parent_workflow_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent workflow run id"),
+ ] = None,
+ parent_step_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent step run id"),
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ created_after: Annotated[
+ Optional[datetime],
+ Field(description="The time after the workflow run was created"),
+ ] = None,
+ created_before: Annotated[
+ Optional[datetime],
+ Field(description="The time before the workflow run was created"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[WorkflowRunsMetrics]:
+ """Get workflow runs metrics
+
+ Get a summary of workflow run metrics for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param event_id: The event id to get runs for.
+ :type event_id: str
+ :param workflow_id: The workflow id to get runs for.
+ :type workflow_id: str
+ :param parent_workflow_run_id: The parent workflow run id
+ :type parent_workflow_run_id: str
+ :param parent_step_run_id: The parent step run id
+ :type parent_step_run_id: str
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param created_after: The time after the workflow run was created
+ :type created_after: datetime
+ :param created_before: The time before the workflow run was created
+ :type created_before: datetime
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_get_metrics_serialize(
+ tenant=tenant,
+ event_id=event_id,
+ workflow_id=workflow_id,
+ parent_workflow_run_id=parent_workflow_run_id,
+ parent_step_run_id=parent_step_run_id,
+ additional_metadata=additional_metadata,
+ created_after=created_after,
+ created_before=created_before,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowRunsMetrics",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_run_get_metrics_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ event_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The event id to get runs for."),
+ ] = None,
+ workflow_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The workflow id to get runs for."),
+ ] = None,
+ parent_workflow_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent workflow run id"),
+ ] = None,
+ parent_step_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent step run id"),
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ created_after: Annotated[
+ Optional[datetime],
+ Field(description="The time after the workflow run was created"),
+ ] = None,
+ created_before: Annotated[
+ Optional[datetime],
+ Field(description="The time before the workflow run was created"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get workflow runs metrics
+
+ Get a summary of workflow run metrics for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param event_id: The event id to get runs for.
+ :type event_id: str
+ :param workflow_id: The workflow id to get runs for.
+ :type workflow_id: str
+ :param parent_workflow_run_id: The parent workflow run id
+ :type parent_workflow_run_id: str
+ :param parent_step_run_id: The parent step run id
+ :type parent_step_run_id: str
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param created_after: The time after the workflow run was created
+ :type created_after: datetime
+ :param created_before: The time before the workflow run was created
+ :type created_before: datetime
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_get_metrics_serialize(
+ tenant=tenant,
+ event_id=event_id,
+ workflow_id=workflow_id,
+ parent_workflow_run_id=parent_workflow_run_id,
+ parent_step_run_id=parent_step_run_id,
+ additional_metadata=additional_metadata,
+ created_after=created_after,
+ created_before=created_before,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowRunsMetrics",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_run_get_metrics_serialize(
+ self,
+ tenant,
+ event_id,
+ workflow_id,
+ parent_workflow_run_id,
+ parent_step_run_id,
+ additional_metadata,
+ created_after,
+ created_before,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ "additionalMetadata": "multi",
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ if event_id is not None:
+ _query_params.append(("eventId", event_id))
+
+ if workflow_id is not None:
+ _query_params.append(("workflowId", workflow_id))
+
+ if parent_workflow_run_id is not None:
+ _query_params.append(("parentWorkflowRunId", parent_workflow_run_id))
+
+ if parent_step_run_id is not None:
+ _query_params.append(("parentStepRunId", parent_step_run_id))
+
+ if additional_metadata is not None:
+ _query_params.append(("additionalMetadata", additional_metadata))
+
+ if created_after is not None:
+ if isinstance(created_after, datetime):
+ _query_params.append(
+ (
+ "createdAfter",
+ created_after.strftime(
+ self.api_client.configuration.datetime_format
+ ),
+ )
+ )
+ else:
+ _query_params.append(("createdAfter", created_after))
+
+ if created_before is not None:
+ if isinstance(created_before, datetime):
+ _query_params.append(
+ (
+ "createdBefore",
+ created_before.strftime(
+ self.api_client.configuration.datetime_format
+ ),
+ )
+ )
+ else:
+ _query_params.append(("createdBefore", created_before))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/workflows/runs/metrics",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def workflow_run_get_shape(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The workflow run id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> WorkflowRunShape:
+ """Get workflow run
+
+ Get a workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow_run: The workflow run id (required)
+ :type workflow_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_get_shape_serialize(
+ tenant=tenant,
+ workflow_run=workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowRunShape",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
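+ # Usage sketch (editorial, illustrative only): the shape endpoint returns a
+ # WorkflowRunShape for the given run rather than the bare WorkflowRun; ids are
+ # placeholders.
+ #
+ #     shape = await api.workflow_run_get_shape(
+ #         tenant="00000000-0000-0000-0000-000000000000",
+ #         workflow_run="11111111-1111-1111-1111-111111111111",
+ #     )
+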
+ @validate_call
+ async def workflow_run_get_shape_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The workflow run id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[WorkflowRunShape]:
+ """Get workflow run
+
+ Get a workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow_run: The workflow run id (required)
+ :type workflow_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_get_shape_serialize(
+ tenant=tenant,
+ workflow_run=workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowRunShape",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_run_get_shape_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The workflow run id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get workflow run
+
+ Get a workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow_run: The workflow run id (required)
+ :type workflow_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_get_shape_serialize(
+ tenant=tenant,
+ workflow_run=workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowRunShape",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_run_get_shape_serialize(
+ self,
+ tenant,
+ workflow_run,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if workflow_run is not None:
+ _path_params["workflow-run"] = workflow_run
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/workflow-runs/{workflow-run}/shape",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def workflow_run_list(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ event_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The event id to get runs for."),
+ ] = None,
+ workflow_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The workflow id to get runs for."),
+ ] = None,
+ parent_workflow_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent workflow run id"),
+ ] = None,
+ parent_step_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent step run id"),
+ ] = None,
+ statuses: Annotated[
+ Optional[List[WorkflowRunStatus]],
+ Field(description="A list of workflow run statuses to filter by"),
+ ] = None,
+ kinds: Annotated[
+ Optional[List[WorkflowKind]],
+ Field(description="A list of workflow kinds to filter by"),
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ created_after: Annotated[
+ Optional[datetime],
+ Field(description="The time after the workflow run was created"),
+ ] = None,
+ created_before: Annotated[
+ Optional[datetime],
+ Field(description="The time before the workflow run was created"),
+ ] = None,
+ finished_after: Annotated[
+ Optional[datetime],
+ Field(description="The time after the workflow run was finished"),
+ ] = None,
+ finished_before: Annotated[
+ Optional[datetime],
+ Field(description="The time before the workflow run was finished"),
+ ] = None,
+ order_by_field: Annotated[
+ Optional[WorkflowRunOrderByField], Field(description="The order by field")
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[WorkflowRunOrderByDirection],
+ Field(description="The order by direction"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> WorkflowRunList:
+ """Get workflow runs
+
+ Get all workflow runs for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param event_id: The event id to get runs for.
+ :type event_id: str
+ :param workflow_id: The workflow id to get runs for.
+ :type workflow_id: str
+ :param parent_workflow_run_id: The parent workflow run id
+ :type parent_workflow_run_id: str
+ :param parent_step_run_id: The parent step run id
+ :type parent_step_run_id: str
+ :param statuses: A list of workflow run statuses to filter by
+ :type statuses: List[WorkflowRunStatus]
+ :param kinds: A list of workflow kinds to filter by
+ :type kinds: List[WorkflowKind]
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param created_after: The time after the workflow run was created
+ :type created_after: datetime
+ :param created_before: The time before the workflow run was created
+ :type created_before: datetime
+ :param finished_after: The time after the workflow run was finished
+ :type finished_after: datetime
+ :param finished_before: The time before the workflow run was finished
+ :type finished_before: datetime
+ :param order_by_field: The order by field
+ :type order_by_field: WorkflowRunOrderByField
+ :param order_by_direction: The order by direction
+ :type order_by_direction: WorkflowRunOrderByDirection
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ event_id=event_id,
+ workflow_id=workflow_id,
+ parent_workflow_run_id=parent_workflow_run_id,
+ parent_step_run_id=parent_step_run_id,
+ statuses=statuses,
+ kinds=kinds,
+ additional_metadata=additional_metadata,
+ created_after=created_after,
+ created_before=created_before,
+ finished_after=finished_after,
+ finished_before=finished_before,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowRunList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
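+ # Usage sketch (editorial, illustrative only): page through failed runs of one
+ # workflow, newest first. The enum members shown (FAILED, CREATEDAT, DESC) are
+ # assumptions about the generated enums; ids are placeholders.
+ #
+ #     runs = await api.workflow_run_list(
+ #         tenant="00000000-0000-0000-0000-000000000000",
+ #         workflow_id="22222222-2222-2222-2222-222222222222",
+ #         statuses=[WorkflowRunStatus.FAILED],
+ #         order_by_field=WorkflowRunOrderByField.CREATEDAT,
+ #         order_by_direction=WorkflowRunOrderByDirection.DESC,
+ #         limit=25,
+ #     )
+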
+ @validate_call
+ async def workflow_run_list_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ event_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The event id to get runs for."),
+ ] = None,
+ workflow_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The workflow id to get runs for."),
+ ] = None,
+ parent_workflow_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent workflow run id"),
+ ] = None,
+ parent_step_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent step run id"),
+ ] = None,
+ statuses: Annotated[
+ Optional[List[WorkflowRunStatus]],
+ Field(description="A list of workflow run statuses to filter by"),
+ ] = None,
+ kinds: Annotated[
+ Optional[List[WorkflowKind]],
+ Field(description="A list of workflow kinds to filter by"),
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ created_after: Annotated[
+ Optional[datetime],
+ Field(description="The time after the workflow run was created"),
+ ] = None,
+ created_before: Annotated[
+ Optional[datetime],
+ Field(description="The time before the workflow run was created"),
+ ] = None,
+ finished_after: Annotated[
+ Optional[datetime],
+ Field(description="The time after the workflow run was finished"),
+ ] = None,
+ finished_before: Annotated[
+ Optional[datetime],
+ Field(description="The time before the workflow run was finished"),
+ ] = None,
+ order_by_field: Annotated[
+ Optional[WorkflowRunOrderByField], Field(description="The order by field")
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[WorkflowRunOrderByDirection],
+ Field(description="The order by direction"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[WorkflowRunList]:
+ """Get workflow runs
+
+ Get all workflow runs for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param event_id: The event id to get runs for.
+ :type event_id: str
+ :param workflow_id: The workflow id to get runs for.
+ :type workflow_id: str
+ :param parent_workflow_run_id: The parent workflow run id
+ :type parent_workflow_run_id: str
+ :param parent_step_run_id: The parent step run id
+ :type parent_step_run_id: str
+ :param statuses: A list of workflow run statuses to filter by
+ :type statuses: List[WorkflowRunStatus]
+ :param kinds: A list of workflow kinds to filter by
+ :type kinds: List[WorkflowKind]
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param created_after: The time after the workflow run was created
+ :type created_after: datetime
+ :param created_before: The time before the workflow run was created
+ :type created_before: datetime
+ :param finished_after: The time after the workflow run was finished
+ :type finished_after: datetime
+ :param finished_before: The time before the workflow run was finished
+ :type finished_before: datetime
+ :param order_by_field: The order by field
+ :type order_by_field: WorkflowRunOrderByField
+ :param order_by_direction: The order by direction
+ :type order_by_direction: WorkflowRunOrderByDirection
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ event_id=event_id,
+ workflow_id=workflow_id,
+ parent_workflow_run_id=parent_workflow_run_id,
+ parent_step_run_id=parent_step_run_id,
+ statuses=statuses,
+ kinds=kinds,
+ additional_metadata=additional_metadata,
+ created_after=created_after,
+ created_before=created_before,
+ finished_after=finished_after,
+ finished_before=finished_before,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowRunList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_run_list_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ event_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The event id to get runs for."),
+ ] = None,
+ workflow_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The workflow id to get runs for."),
+ ] = None,
+ parent_workflow_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent workflow run id"),
+ ] = None,
+ parent_step_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent step run id"),
+ ] = None,
+ statuses: Annotated[
+ Optional[List[WorkflowRunStatus]],
+ Field(description="A list of workflow run statuses to filter by"),
+ ] = None,
+ kinds: Annotated[
+ Optional[List[WorkflowKind]],
+ Field(description="A list of workflow kinds to filter by"),
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ created_after: Annotated[
+ Optional[datetime],
+ Field(description="The time after the workflow run was created"),
+ ] = None,
+ created_before: Annotated[
+ Optional[datetime],
+ Field(description="The time before the workflow run was created"),
+ ] = None,
+ finished_after: Annotated[
+ Optional[datetime],
+ Field(description="The time after the workflow run was finished"),
+ ] = None,
+ finished_before: Annotated[
+ Optional[datetime],
+ Field(description="The time before the workflow run was finished"),
+ ] = None,
+ order_by_field: Annotated[
+ Optional[WorkflowRunOrderByField], Field(description="The order by field")
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[WorkflowRunOrderByDirection],
+ Field(description="The order by direction"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get workflow runs
+
+ Get all workflow runs for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param event_id: The event id to get runs for.
+ :type event_id: str
+ :param workflow_id: The workflow id to get runs for.
+ :type workflow_id: str
+ :param parent_workflow_run_id: The parent workflow run id
+ :type parent_workflow_run_id: str
+ :param parent_step_run_id: The parent step run id
+ :type parent_step_run_id: str
+ :param statuses: A list of workflow run statuses to filter by
+ :type statuses: List[WorkflowRunStatus]
+ :param kinds: A list of workflow kinds to filter by
+ :type kinds: List[WorkflowKind]
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param created_after: The time after the workflow run was created
+ :type created_after: datetime
+ :param created_before: The time before the workflow run was created
+ :type created_before: datetime
+ :param finished_after: The time after the workflow run was finished
+ :type finished_after: datetime
+ :param finished_before: The time before the workflow run was finished
+ :type finished_before: datetime
+ :param order_by_field: The order by field
+ :type order_by_field: WorkflowRunOrderByField
+ :param order_by_direction: The order by direction
+ :type order_by_direction: WorkflowRunOrderByDirection
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ event_id=event_id,
+ workflow_id=workflow_id,
+ parent_workflow_run_id=parent_workflow_run_id,
+ parent_step_run_id=parent_step_run_id,
+ statuses=statuses,
+ kinds=kinds,
+ additional_metadata=additional_metadata,
+ created_after=created_after,
+ created_before=created_before,
+ finished_after=finished_after,
+ finished_before=finished_before,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowRunList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_run_list_serialize(
+ self,
+ tenant,
+ offset,
+ limit,
+ event_id,
+ workflow_id,
+ parent_workflow_run_id,
+ parent_step_run_id,
+ statuses,
+ kinds,
+ additional_metadata,
+ created_after,
+ created_before,
+ finished_after,
+ finished_before,
+ order_by_field,
+ order_by_direction,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ "statuses": "multi",
+ "kinds": "multi",
+ "additionalMetadata": "multi",
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ if offset is not None:
+
+ _query_params.append(("offset", offset))
+
+ if limit is not None:
+
+ _query_params.append(("limit", limit))
+
+ if event_id is not None:
+
+ _query_params.append(("eventId", event_id))
+
+ if workflow_id is not None:
+
+ _query_params.append(("workflowId", workflow_id))
+
+ if parent_workflow_run_id is not None:
+
+ _query_params.append(("parentWorkflowRunId", parent_workflow_run_id))
+
+ if parent_step_run_id is not None:
+
+ _query_params.append(("parentStepRunId", parent_step_run_id))
+
+ if statuses is not None:
+
+ _query_params.append(("statuses", statuses))
+
+ if kinds is not None:
+
+ _query_params.append(("kinds", kinds))
+
+ if additional_metadata is not None:
+
+ _query_params.append(("additionalMetadata", additional_metadata))
+
+ if created_after is not None:
+ if isinstance(created_after, datetime):
+ _query_params.append(
+ (
+ "createdAfter",
+ created_after.strftime(
+ self.api_client.configuration.datetime_format
+ ),
+ )
+ )
+ else:
+ _query_params.append(("createdAfter", created_after))
+
+ if created_before is not None:
+ if isinstance(created_before, datetime):
+ _query_params.append(
+ (
+ "createdBefore",
+ created_before.strftime(
+ self.api_client.configuration.datetime_format
+ ),
+ )
+ )
+ else:
+ _query_params.append(("createdBefore", created_before))
+
+ if finished_after is not None:
+ if isinstance(finished_after, datetime):
+ _query_params.append(
+ (
+ "finishedAfter",
+ finished_after.strftime(
+ self.api_client.configuration.datetime_format
+ ),
+ )
+ )
+ else:
+ _query_params.append(("finishedAfter", finished_after))
+
+ if finished_before is not None:
+ if isinstance(finished_before, datetime):
+ _query_params.append(
+ (
+ "finishedBefore",
+ finished_before.strftime(
+ self.api_client.configuration.datetime_format
+ ),
+ )
+ )
+ else:
+ _query_params.append(("finishedBefore", finished_before))
+
+ if order_by_field is not None:
+
+ _query_params.append(("orderByField", order_by_field.value))
+
+ if order_by_direction is not None:
+
+ _query_params.append(("orderByDirection", order_by_direction.value))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/workflows/runs",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
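+
+ # --- Illustrative usage (editor's sketch, not part of the generated client) ---
+ # Lists workflow runs for a tenant. The enclosing class name (WorkflowApi) and
+ # the plain deserializing `workflow_run_list` variant are assumed from the
+ # generator's standard pattern; `api_client` and `TENANT_ID` are placeholders.
+ #
+ #   api = WorkflowApi(api_client)
+ #   runs = await api.workflow_run_list(tenant=TENANT_ID, limit=10, offset=0)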
+
+ @validate_call
+ async def workflow_scheduled_delete(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ scheduled_workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The scheduled workflow id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Delete scheduled workflow run
+
+ Delete a scheduled workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param scheduled_workflow_run: The scheduled workflow id (required)
+ :type scheduled_workflow_run: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_scheduled_delete_serialize(
+ tenant=tenant,
+ scheduled_workflow_run=scheduled_workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_scheduled_delete_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ scheduled_workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The scheduled workflow id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Delete scheduled workflow run
+
+ Delete a scheduled workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param scheduled_workflow_run: The scheduled workflow id (required)
+ :type scheduled_workflow_run: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_scheduled_delete_serialize(
+ tenant=tenant,
+ scheduled_workflow_run=scheduled_workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_scheduled_delete_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ scheduled_workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The scheduled workflow id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Delete scheduled workflow run
+
+ Delete a scheduled workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param scheduled_workflow_run: The scheduled workflow id (required)
+ :type scheduled_workflow_run: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_scheduled_delete_serialize(
+ tenant=tenant,
+ scheduled_workflow_run=scheduled_workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "204": None,
+ "400": "APIErrors",
+ "403": "APIError",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_scheduled_delete_serialize(
+ self,
+ tenant,
+ scheduled_workflow_run,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if scheduled_workflow_run is not None:
+ _path_params["scheduled-workflow-run"] = scheduled_workflow_run
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="DELETE",
+ resource_path="/api/v1/tenants/{tenant}/workflows/scheduled/{scheduled-workflow-run}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
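+
+ # --- Illustrative usage (editor's sketch, not part of the generated client) ---
+ # Deletes one scheduled workflow run; the call returns None on a 204 response.
+ # `api`, `TENANT_ID` and `SCHEDULED_RUN_ID` are placeholders.
+ #
+ #   await api.workflow_scheduled_delete(
+ #       tenant=TENANT_ID, scheduled_workflow_run=SCHEDULED_RUN_ID
+ #   )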
+
+ @validate_call
+ async def workflow_scheduled_get(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ scheduled_workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The scheduled workflow id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ScheduledWorkflows:
+ """Get scheduled workflow run
+
+ Get a scheduled workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param scheduled_workflow_run: The scheduled workflow id (required)
+ :type scheduled_workflow_run: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_scheduled_get_serialize(
+ tenant=tenant,
+ scheduled_workflow_run=scheduled_workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ScheduledWorkflows",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_scheduled_get_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ scheduled_workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The scheduled workflow id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[ScheduledWorkflows]:
+ """Get scheduled workflow run
+
+ Get a scheduled workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param scheduled_workflow_run: The scheduled workflow id (required)
+ :type scheduled_workflow_run: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_scheduled_get_serialize(
+ tenant=tenant,
+ scheduled_workflow_run=scheduled_workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ScheduledWorkflows",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_scheduled_get_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ scheduled_workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The scheduled workflow id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get scheduled workflow run
+
+ Get a scheduled workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param scheduled_workflow_run: The scheduled workflow id (required)
+ :type scheduled_workflow_run: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_scheduled_get_serialize(
+ tenant=tenant,
+ scheduled_workflow_run=scheduled_workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ScheduledWorkflows",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_scheduled_get_serialize(
+ self,
+ tenant,
+ scheduled_workflow_run,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if scheduled_workflow_run is not None:
+ _path_params["scheduled-workflow-run"] = scheduled_workflow_run
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/workflows/scheduled/{scheduled-workflow-run}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
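+
+ # --- Illustrative usage (editor's sketch, not part of the generated client) ---
+ # Fetches one scheduled workflow run as a ScheduledWorkflows model.
+ # `api`, `TENANT_ID` and `SCHEDULED_RUN_ID` are placeholders.
+ #
+ #   scheduled = await api.workflow_scheduled_get(
+ #       tenant=TENANT_ID, scheduled_workflow_run=SCHEDULED_RUN_ID
+ #   )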
+
+ @validate_call
+ async def workflow_scheduled_list(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ order_by_field: Annotated[
+ Optional[ScheduledWorkflowsOrderByField],
+ Field(description="The order by field"),
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[WorkflowRunOrderByDirection],
+ Field(description="The order by direction"),
+ ] = None,
+ workflow_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The workflow id to get runs for."),
+ ] = None,
+ parent_workflow_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent workflow run id"),
+ ] = None,
+ parent_step_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent step run id"),
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ statuses: Annotated[
+ Optional[List[ScheduledRunStatus]],
+ Field(description="A list of scheduled run statuses to filter by"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ScheduledWorkflowsList:
+ """Get scheduled workflow runs
+
+ Get all scheduled workflow runs for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param order_by_field: The order by field
+ :type order_by_field: ScheduledWorkflowsOrderByField
+ :param order_by_direction: The order by direction
+ :type order_by_direction: WorkflowRunOrderByDirection
+ :param workflow_id: The workflow id to get runs for.
+ :type workflow_id: str
+ :param parent_workflow_run_id: The parent workflow run id
+ :type parent_workflow_run_id: str
+ :param parent_step_run_id: The parent step run id
+ :type parent_step_run_id: str
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param statuses: A list of scheduled run statuses to filter by
+ :type statuses: List[ScheduledRunStatus]
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_scheduled_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ workflow_id=workflow_id,
+ parent_workflow_run_id=parent_workflow_run_id,
+ parent_step_run_id=parent_step_run_id,
+ additional_metadata=additional_metadata,
+ statuses=statuses,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ScheduledWorkflowsList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_scheduled_list_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ order_by_field: Annotated[
+ Optional[ScheduledWorkflowsOrderByField],
+ Field(description="The order by field"),
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[WorkflowRunOrderByDirection],
+ Field(description="The order by direction"),
+ ] = None,
+ workflow_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The workflow id to get runs for."),
+ ] = None,
+ parent_workflow_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent workflow run id"),
+ ] = None,
+ parent_step_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent step run id"),
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ statuses: Annotated[
+ Optional[List[ScheduledRunStatus]],
+ Field(description="A list of scheduled run statuses to filter by"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[ScheduledWorkflowsList]:
+ """Get scheduled workflow runs
+
+ Get all scheduled workflow runs for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param order_by_field: The order by field
+ :type order_by_field: ScheduledWorkflowsOrderByField
+ :param order_by_direction: The order by direction
+ :type order_by_direction: WorkflowRunOrderByDirection
+ :param workflow_id: The workflow id to get runs for.
+ :type workflow_id: str
+ :param parent_workflow_run_id: The parent workflow run id
+ :type parent_workflow_run_id: str
+ :param parent_step_run_id: The parent step run id
+ :type parent_step_run_id: str
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param statuses: A list of scheduled run statuses to filter by
+ :type statuses: List[ScheduledRunStatus]
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_scheduled_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ workflow_id=workflow_id,
+ parent_workflow_run_id=parent_workflow_run_id,
+ parent_step_run_id=parent_step_run_id,
+ additional_metadata=additional_metadata,
+ statuses=statuses,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ScheduledWorkflowsList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_scheduled_list_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ offset: Annotated[
+ Optional[StrictInt], Field(description="The number to skip")
+ ] = None,
+ limit: Annotated[
+ Optional[StrictInt], Field(description="The number to limit by")
+ ] = None,
+ order_by_field: Annotated[
+ Optional[ScheduledWorkflowsOrderByField],
+ Field(description="The order by field"),
+ ] = None,
+ order_by_direction: Annotated[
+ Optional[WorkflowRunOrderByDirection],
+ Field(description="The order by direction"),
+ ] = None,
+ workflow_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The workflow id to get runs for."),
+ ] = None,
+ parent_workflow_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent workflow run id"),
+ ] = None,
+ parent_step_run_id: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(description="The parent step run id"),
+ ] = None,
+ additional_metadata: Annotated[
+ Optional[List[StrictStr]],
+ Field(description="A list of metadata key value pairs to filter by"),
+ ] = None,
+ statuses: Annotated[
+ Optional[List[ScheduledRunStatus]],
+ Field(description="A list of scheduled run statuses to filter by"),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get scheduled workflow runs
+
+ Get all scheduled workflow runs for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param offset: The number to skip
+ :type offset: int
+ :param limit: The number to limit by
+ :type limit: int
+ :param order_by_field: The order by field
+ :type order_by_field: ScheduledWorkflowsOrderByField
+ :param order_by_direction: The order by direction
+ :type order_by_direction: WorkflowRunOrderByDirection
+ :param workflow_id: The workflow id to get runs for.
+ :type workflow_id: str
+ :param parent_workflow_run_id: The parent workflow run id
+ :type parent_workflow_run_id: str
+ :param parent_step_run_id: The parent step run id
+ :type parent_step_run_id: str
+ :param additional_metadata: A list of metadata key value pairs to filter by
+ :type additional_metadata: List[str]
+ :param statuses: A list of scheduled run statuses to filter by
+ :type statuses: List[ScheduledRunStatus]
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_scheduled_list_serialize(
+ tenant=tenant,
+ offset=offset,
+ limit=limit,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ workflow_id=workflow_id,
+ parent_workflow_run_id=parent_workflow_run_id,
+ parent_step_run_id=parent_step_run_id,
+ additional_metadata=additional_metadata,
+ statuses=statuses,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ScheduledWorkflowsList",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_scheduled_list_serialize(
+ self,
+ tenant,
+ offset,
+ limit,
+ order_by_field,
+ order_by_direction,
+ workflow_id,
+ parent_workflow_run_id,
+ parent_step_run_id,
+ additional_metadata,
+ statuses,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ "additionalMetadata": "multi",
+ "statuses": "multi",
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ if offset is not None:
+
+ _query_params.append(("offset", offset))
+
+ if limit is not None:
+
+ _query_params.append(("limit", limit))
+
+ if order_by_field is not None:
+
+ _query_params.append(("orderByField", order_by_field.value))
+
+ if order_by_direction is not None:
+
+ _query_params.append(("orderByDirection", order_by_direction.value))
+
+ if workflow_id is not None:
+
+ _query_params.append(("workflowId", workflow_id))
+
+ if parent_workflow_run_id is not None:
+
+ _query_params.append(("parentWorkflowRunId", parent_workflow_run_id))
+
+ if parent_step_run_id is not None:
+
+ _query_params.append(("parentStepRunId", parent_step_run_id))
+
+ if additional_metadata is not None:
+
+ _query_params.append(("additionalMetadata", additional_metadata))
+
+ if statuses is not None:
+
+ _query_params.append(("statuses", statuses))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/workflows/scheduled",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
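+
+ # --- Illustrative usage (editor's sketch, not part of the generated client) ---
+ # Pages through scheduled workflow runs; `statuses` accepts a list of
+ # ScheduledRunStatus values when filtering is needed. `api` and `TENANT_ID`
+ # are placeholders.
+ #
+ #   scheduled_page = await api.workflow_scheduled_list(
+ #       tenant=TENANT_ID, limit=25, offset=0
+ #   )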
+
+ @validate_call
+ async def workflow_update(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ workflow_update_request: Annotated[
+ WorkflowUpdateRequest, Field(description="The input to update the workflow")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> Workflow:
+ """Update workflow
+
+ Update a workflow for a tenant
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param workflow_update_request: The input to update the workflow (required)
+ :type workflow_update_request: WorkflowUpdateRequest
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_update_serialize(
+ workflow=workflow,
+ workflow_update_request=workflow_update_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Workflow",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_update_with_http_info(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ workflow_update_request: Annotated[
+ WorkflowUpdateRequest, Field(description="The input to update the workflow")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[Workflow]:
+ """Update workflow
+
+ Update a workflow for a tenant
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param workflow_update_request: The input to update the workflow (required)
+ :type workflow_update_request: WorkflowUpdateRequest
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_update_serialize(
+ workflow=workflow,
+ workflow_update_request=workflow_update_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Workflow",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_update_without_preload_content(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ workflow_update_request: Annotated[
+ WorkflowUpdateRequest, Field(description="The input to update the workflow")
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Update workflow
+
+ Update a workflow for a tenant
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param workflow_update_request: The input to update the workflow (required)
+ :type workflow_update_request: WorkflowUpdateRequest
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_update_serialize(
+ workflow=workflow,
+ workflow_update_request=workflow_update_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Workflow",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_update_serialize(
+ self,
+ workflow,
+ workflow_update_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if workflow is not None:
+ _path_params["workflow"] = workflow
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if workflow_update_request is not None:
+ _body_params = workflow_update_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="PATCH",
+ resource_path="/api/v1/workflows/{workflow}",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
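+
+ # --- Illustrative usage (editor's sketch, not part of the generated client) ---
+ # Sends a PATCH to /api/v1/workflows/{workflow} with a WorkflowUpdateRequest
+ # body. `api` and `WORKFLOW_ID` are placeholders, and the `is_paused` field is
+ # an assumption about that model.
+ #
+ #   updated = await api.workflow_update(
+ #       workflow=WORKFLOW_ID,
+ #       workflow_update_request=WorkflowUpdateRequest(is_paused=True),
+ #   )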
+
+ @validate_call
+ async def workflow_version_get(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ version: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(
+ description="The workflow version. If not supplied, the latest version is fetched."
+ ),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> WorkflowVersion:
+ """Get workflow version
+
+ Get a workflow version for a tenant
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param version: The workflow version. If not supplied, the latest version is fetched.
+ :type version: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_version_get_serialize(
+ workflow=workflow,
+ version=version,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowVersion",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_version_get_with_http_info(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ version: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(
+ description="The workflow version. If not supplied, the latest version is fetched."
+ ),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[WorkflowVersion]:
+ """Get workflow version
+
+ Get a workflow version for a tenant
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param version: The workflow version. If not supplied, the latest version is fetched.
+ :type version: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_version_get_serialize(
+ workflow=workflow,
+ version=version,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowVersion",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_version_get_without_preload_content(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ version: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(
+ description="The workflow version. If not supplied, the latest version is fetched."
+ ),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get workflow version
+
+ Get a workflow version for a tenant
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param version: The workflow version. If not supplied, the latest version is fetched.
+ :type version: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_version_get_serialize(
+ workflow=workflow,
+ version=version,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowVersion",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_version_get_serialize(
+ self,
+ workflow,
+ version,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if workflow is not None:
+ _path_params["workflow"] = workflow
+ # process the query parameters
+ if version is not None:
+
+ _query_params.append(("version", version))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/workflows/{workflow}/versions",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
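+    # Illustrative usage sketch, not generator output: calling the
+    # workflow_version_get method above on an already-constructed instance of the
+    # enclosing API class (referred to below by the hypothetical variable
+    # `workflow_api`), with a placeholder workflow id.
+    #
+    #   version = await workflow_api.workflow_version_get(
+    #       workflow="<36-char workflow id>",  # placeholder, not a real id
+    #   )  # omit `version=` to fetch the latest workflow version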
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/workflow_run_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/workflow_run_api.py
new file mode 100644
index 00000000..e7feb9f4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/workflow_run_api.py
@@ -0,0 +1,1932 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.models.create_cron_workflow_trigger_request import (
+ CreateCronWorkflowTriggerRequest,
+)
+from hatchet_sdk.clients.rest.models.cron_workflows import CronWorkflows
+from hatchet_sdk.clients.rest.models.event_update_cancel200_response import (
+ EventUpdateCancel200Response,
+)
+from hatchet_sdk.clients.rest.models.replay_workflow_runs_request import (
+ ReplayWorkflowRunsRequest,
+)
+from hatchet_sdk.clients.rest.models.replay_workflow_runs_response import (
+ ReplayWorkflowRunsResponse,
+)
+from hatchet_sdk.clients.rest.models.schedule_workflow_run_request import (
+ ScheduleWorkflowRunRequest,
+)
+from hatchet_sdk.clients.rest.models.scheduled_workflows import ScheduledWorkflows
+from hatchet_sdk.clients.rest.models.trigger_workflow_run_request import (
+ TriggerWorkflowRunRequest,
+)
+from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun
+from hatchet_sdk.clients.rest.models.workflow_runs_cancel_request import (
+ WorkflowRunsCancelRequest,
+)
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class WorkflowRunApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+ @validate_call
+ async def cron_workflow_trigger_create(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow: Annotated[StrictStr, Field(description="The workflow name")],
+ create_cron_workflow_trigger_request: Annotated[
+ CreateCronWorkflowTriggerRequest,
+ Field(description="The input to the cron job workflow trigger"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> CronWorkflows:
+ """Create cron job workflow trigger
+
+ Create a new cron job workflow trigger for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow: The workflow name (required)
+ :type workflow: str
+ :param create_cron_workflow_trigger_request: The input to the cron job workflow trigger (required)
+ :type create_cron_workflow_trigger_request: CreateCronWorkflowTriggerRequest
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._cron_workflow_trigger_create_serialize(
+ tenant=tenant,
+ workflow=workflow,
+ create_cron_workflow_trigger_request=create_cron_workflow_trigger_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CronWorkflows",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def cron_workflow_trigger_create_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow: Annotated[StrictStr, Field(description="The workflow name")],
+ create_cron_workflow_trigger_request: Annotated[
+ CreateCronWorkflowTriggerRequest,
+ Field(description="The input to the cron job workflow trigger"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[CronWorkflows]:
+ """Create cron job workflow trigger
+
+ Create a new cron job workflow trigger for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow: The workflow name (required)
+ :type workflow: str
+ :param create_cron_workflow_trigger_request: The input to the cron job workflow trigger (required)
+ :type create_cron_workflow_trigger_request: CreateCronWorkflowTriggerRequest
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._cron_workflow_trigger_create_serialize(
+ tenant=tenant,
+ workflow=workflow,
+ create_cron_workflow_trigger_request=create_cron_workflow_trigger_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CronWorkflows",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def cron_workflow_trigger_create_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow: Annotated[StrictStr, Field(description="The workflow name")],
+ create_cron_workflow_trigger_request: Annotated[
+ CreateCronWorkflowTriggerRequest,
+ Field(description="The input to the cron job workflow trigger"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Create cron job workflow trigger
+
+ Create a new cron job workflow trigger for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow: The workflow name (required)
+ :type workflow: str
+ :param create_cron_workflow_trigger_request: The input to the cron job workflow trigger (required)
+ :type create_cron_workflow_trigger_request: CreateCronWorkflowTriggerRequest
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._cron_workflow_trigger_create_serialize(
+ tenant=tenant,
+ workflow=workflow,
+ create_cron_workflow_trigger_request=create_cron_workflow_trigger_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CronWorkflows",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _cron_workflow_trigger_create_serialize(
+ self,
+ tenant,
+ workflow,
+ create_cron_workflow_trigger_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if workflow is not None:
+ _path_params["workflow"] = workflow
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if create_cron_workflow_trigger_request is not None:
+ _body_params = create_cron_workflow_trigger_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/workflows/{workflow}/crons",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
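+    # Illustrative usage sketch, not generator output: creating a cron trigger via
+    # the methods above. Assumes ApiClient.get_default() is already configured; the
+    # tenant id and request payload are placeholders.
+    #
+    #   api = WorkflowRunApi()  # falls back to ApiClient.get_default()
+    #   cron = await api.cron_workflow_trigger_create(
+    #       tenant="<36-char tenant id>",
+    #       workflow="my-workflow",
+    #       create_cron_workflow_trigger_request=CreateCronWorkflowTriggerRequest(...),
+    #   )  # returns a CronWorkflows model on HTTP 200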
+
+ @validate_call
+ async def scheduled_workflow_run_create(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow: Annotated[StrictStr, Field(description="The workflow name")],
+ schedule_workflow_run_request: Annotated[
+ ScheduleWorkflowRunRequest,
+ Field(description="The input to the scheduled workflow run"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ScheduledWorkflows:
+        """Schedule workflow run
+
+ Schedule a new workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow: The workflow name (required)
+ :type workflow: str
+ :param schedule_workflow_run_request: The input to the scheduled workflow run (required)
+ :type schedule_workflow_run_request: ScheduleWorkflowRunRequest
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._scheduled_workflow_run_create_serialize(
+ tenant=tenant,
+ workflow=workflow,
+ schedule_workflow_run_request=schedule_workflow_run_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ScheduledWorkflows",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def scheduled_workflow_run_create_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow: Annotated[StrictStr, Field(description="The workflow name")],
+ schedule_workflow_run_request: Annotated[
+ ScheduleWorkflowRunRequest,
+ Field(description="The input to the scheduled workflow run"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[ScheduledWorkflows]:
+        """Schedule workflow run
+
+ Schedule a new workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow: The workflow name (required)
+ :type workflow: str
+ :param schedule_workflow_run_request: The input to the scheduled workflow run (required)
+ :type schedule_workflow_run_request: ScheduleWorkflowRunRequest
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._scheduled_workflow_run_create_serialize(
+ tenant=tenant,
+ workflow=workflow,
+ schedule_workflow_run_request=schedule_workflow_run_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ScheduledWorkflows",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def scheduled_workflow_run_create_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow: Annotated[StrictStr, Field(description="The workflow name")],
+ schedule_workflow_run_request: Annotated[
+ ScheduleWorkflowRunRequest,
+ Field(description="The input to the scheduled workflow run"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+        """Schedule workflow run
+
+ Schedule a new workflow run for a tenant
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow: The workflow name (required)
+ :type workflow: str
+ :param schedule_workflow_run_request: The input to the scheduled workflow run (required)
+ :type schedule_workflow_run_request: ScheduleWorkflowRunRequest
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._scheduled_workflow_run_create_serialize(
+ tenant=tenant,
+ workflow=workflow,
+ schedule_workflow_run_request=schedule_workflow_run_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ScheduledWorkflows",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _scheduled_workflow_run_create_serialize(
+ self,
+ tenant,
+ workflow,
+ schedule_workflow_run_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if workflow is not None:
+ _path_params["workflow"] = workflow
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if schedule_workflow_run_request is not None:
+ _body_params = schedule_workflow_run_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/workflows/{workflow}/scheduled",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
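+    # Illustrative usage sketch, not generator output: scheduling a run with the
+    # methods above, reusing the hypothetical `api` instance from the earlier sketch;
+    # ids and payload are placeholders.
+    #
+    #   scheduled = await api.scheduled_workflow_run_create(
+    #       tenant="<36-char tenant id>",
+    #       workflow="my-workflow",
+    #       schedule_workflow_run_request=ScheduleWorkflowRunRequest(...),
+    #   )  # returns a ScheduledWorkflows model on HTTP 200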
+
+ @validate_call
+ async def workflow_run_cancel(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow_runs_cancel_request: Annotated[
+ WorkflowRunsCancelRequest,
+ Field(description="The input to cancel the workflow runs"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> EventUpdateCancel200Response:
+ """Cancel workflow runs
+
+ Cancel a batch of workflow runs
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow_runs_cancel_request: The input to cancel the workflow runs (required)
+ :type workflow_runs_cancel_request: WorkflowRunsCancelRequest
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_cancel_serialize(
+ tenant=tenant,
+ workflow_runs_cancel_request=workflow_runs_cancel_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventUpdateCancel200Response",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_run_cancel_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow_runs_cancel_request: Annotated[
+ WorkflowRunsCancelRequest,
+ Field(description="The input to cancel the workflow runs"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[EventUpdateCancel200Response]:
+ """Cancel workflow runs
+
+ Cancel a batch of workflow runs
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow_runs_cancel_request: The input to cancel the workflow runs (required)
+ :type workflow_runs_cancel_request: WorkflowRunsCancelRequest
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_cancel_serialize(
+ tenant=tenant,
+ workflow_runs_cancel_request=workflow_runs_cancel_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventUpdateCancel200Response",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_run_cancel_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow_runs_cancel_request: Annotated[
+ WorkflowRunsCancelRequest,
+ Field(description="The input to cancel the workflow runs"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Cancel workflow runs
+
+ Cancel a batch of workflow runs
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow_runs_cancel_request: The input to cancel the workflow runs (required)
+ :type workflow_runs_cancel_request: WorkflowRunsCancelRequest
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_cancel_serialize(
+ tenant=tenant,
+ workflow_runs_cancel_request=workflow_runs_cancel_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "EventUpdateCancel200Response",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_run_cancel_serialize(
+ self,
+ tenant,
+ workflow_runs_cancel_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if workflow_runs_cancel_request is not None:
+ _body_params = workflow_runs_cancel_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/workflows/cancel",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
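+    # Illustrative usage sketch, not generator output: cancelling a batch of runs,
+    # reusing the hypothetical `api` instance and placeholder ids from the sketches above.
+    #
+    #   result = await api.workflow_run_cancel(
+    #       tenant="<36-char tenant id>",
+    #       workflow_runs_cancel_request=WorkflowRunsCancelRequest(...),
+    #   )  # returns an EventUpdateCancel200Response model on HTTP 200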
+
+ @validate_call
+ async def workflow_run_create(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ trigger_workflow_run_request: Annotated[
+ TriggerWorkflowRunRequest,
+ Field(description="The input to the workflow run"),
+ ],
+ version: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(
+ description="The workflow version. If not supplied, the latest version is fetched."
+ ),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> WorkflowRun:
+ """Trigger workflow run
+
+ Trigger a new workflow run for a tenant
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param trigger_workflow_run_request: The input to the workflow run (required)
+ :type trigger_workflow_run_request: TriggerWorkflowRunRequest
+ :param version: The workflow version. If not supplied, the latest version is fetched.
+ :type version: str
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_create_serialize(
+ workflow=workflow,
+ trigger_workflow_run_request=trigger_workflow_run_request,
+ version=version,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowRun",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_run_create_with_http_info(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ trigger_workflow_run_request: Annotated[
+ TriggerWorkflowRunRequest,
+ Field(description="The input to the workflow run"),
+ ],
+ version: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(
+ description="The workflow version. If not supplied, the latest version is fetched."
+ ),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[WorkflowRun]:
+ """Trigger workflow run
+
+ Trigger a new workflow run for a tenant
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param trigger_workflow_run_request: The input to the workflow run (required)
+ :type trigger_workflow_run_request: TriggerWorkflowRunRequest
+ :param version: The workflow version. If not supplied, the latest version is fetched.
+ :type version: str
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_create_serialize(
+ workflow=workflow,
+ trigger_workflow_run_request=trigger_workflow_run_request,
+ version=version,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowRun",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_run_create_without_preload_content(
+ self,
+ workflow: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The workflow id"
+ ),
+ ],
+ trigger_workflow_run_request: Annotated[
+ TriggerWorkflowRunRequest,
+ Field(description="The input to the workflow run"),
+ ],
+ version: Annotated[
+ Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
+ Field(
+ description="The workflow version. If not supplied, the latest version is fetched."
+ ),
+ ] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Trigger workflow run
+
+ Trigger a new workflow run for a tenant
+
+ :param workflow: The workflow id (required)
+ :type workflow: str
+ :param trigger_workflow_run_request: The input to the workflow run (required)
+ :type trigger_workflow_run_request: TriggerWorkflowRunRequest
+ :param version: The workflow version. If not supplied, the latest version is fetched.
+ :type version: str
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_create_serialize(
+ workflow=workflow,
+ trigger_workflow_run_request=trigger_workflow_run_request,
+ version=version,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "WorkflowRun",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_run_create_serialize(
+ self,
+ workflow,
+ trigger_workflow_run_request,
+ version,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if workflow is not None:
+ _path_params["workflow"] = workflow
+ # process the query parameters
+ if version is not None:
+
+ _query_params.append(("version", version))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if trigger_workflow_run_request is not None:
+ _body_params = trigger_workflow_run_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/workflows/{workflow}/trigger",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
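+    # Illustrative usage sketch, not generator output: triggering a run directly by
+    # workflow id with the methods above; `version` is optional and the latest
+    # version is used when omitted. Ids and payload are placeholders.
+    #
+    #   run = await api.workflow_run_create(
+    #       workflow="<36-char workflow id>",
+    #       trigger_workflow_run_request=TriggerWorkflowRunRequest(...),
+    #   )  # returns a WorkflowRun model on HTTP 200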
+
+ @validate_call
+ async def workflow_run_get_input(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The workflow run id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> Dict[str, object]:
+ """Get workflow run input
+
+ Get the input for a workflow run.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow_run: The workflow run id (required)
+ :type workflow_run: str
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_get_input_serialize(
+ tenant=tenant,
+ workflow_run=workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Dict[str, object]",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_run_get_input_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The workflow run id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[Dict[str, object]]:
+ """Get workflow run input
+
+ Get the input for a workflow run.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow_run: The workflow run id (required)
+ :type workflow_run: str
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_get_input_serialize(
+ tenant=tenant,
+ workflow_run=workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Dict[str, object]",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_run_get_input_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The workflow run id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get workflow run input
+
+ Get the input for a workflow run.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param workflow_run: The workflow run id (required)
+ :type workflow_run: str
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_get_input_serialize(
+ tenant=tenant,
+ workflow_run=workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Dict[str, object]",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_run_get_input_serialize(
+ self,
+ tenant,
+ workflow_run,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ if workflow_run is not None:
+ _path_params["workflow-run"] = workflow_run
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/tenants/{tenant}/workflow-runs/{workflow-run}/input",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
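+    # Illustrative usage sketch, not generator output: reading a run's input back as
+    # a plain dict with the methods above; ids are placeholders.
+    #
+    #   run_input = await api.workflow_run_get_input(
+    #       tenant="<36-char tenant id>",
+    #       workflow_run="<36-char workflow run id>",
+    #   )  # returns Dict[str, object] on HTTP 200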
+
+ @validate_call
+ async def workflow_run_update_replay(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ replay_workflow_runs_request: Annotated[
+ ReplayWorkflowRunsRequest,
+ Field(description="The workflow run ids to replay"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ReplayWorkflowRunsResponse:
+ """Replay workflow runs
+
+ Replays a list of workflow runs.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param replay_workflow_runs_request: The workflow run ids to replay (required)
+ :type replay_workflow_runs_request: ReplayWorkflowRunsRequest
+ :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_update_replay_serialize(
+ tenant=tenant,
+ replay_workflow_runs_request=replay_workflow_runs_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ReplayWorkflowRunsResponse",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_run_update_replay_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ replay_workflow_runs_request: Annotated[
+ ReplayWorkflowRunsRequest,
+ Field(description="The workflow run ids to replay"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[ReplayWorkflowRunsResponse]:
+ """Replay workflow runs
+
+ Replays a list of workflow runs.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param replay_workflow_runs_request: The workflow run ids to replay (required)
+ :type replay_workflow_runs_request: ReplayWorkflowRunsRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_update_replay_serialize(
+ tenant=tenant,
+ replay_workflow_runs_request=replay_workflow_runs_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ReplayWorkflowRunsResponse",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_run_update_replay_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ replay_workflow_runs_request: Annotated[
+ ReplayWorkflowRunsRequest,
+ Field(description="The workflow run ids to replay"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Replay workflow runs
+
+ Replays a list of workflow runs.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param replay_workflow_runs_request: The workflow run ids to replay (required)
+ :type replay_workflow_runs_request: ReplayWorkflowRunsRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_update_replay_serialize(
+ tenant=tenant,
+ replay_workflow_runs_request=replay_workflow_runs_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ReplayWorkflowRunsResponse",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_run_update_replay_serialize(
+ self,
+ tenant,
+ replay_workflow_runs_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[
+ str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+ ] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if replay_workflow_runs_request is not None:
+ _body_params = replay_workflow_runs_request
+
+ # set the HTTP header `Accept`
+ if "Accept" not in _header_params:
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/workflow-runs/replay",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
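A minimal usage sketch for the replay endpoint added above (not part of the generated file). It assumes the class defined in workflow_run_api.py is named WorkflowRunApi with the same constructor shape as WorkflowRunsApi below, that ReplayWorkflowRunsRequest exposes a workflow_run_ids field, and that access_token backs the bearerAuth scheme; host, token, and IDs are placeholders.

import asyncio

from hatchet_sdk.clients.rest.api.workflow_run_api import WorkflowRunApi
from hatchet_sdk.clients.rest.api_client import ApiClient
from hatchet_sdk.clients.rest.configuration import Configuration
from hatchet_sdk.clients.rest.models.replay_workflow_runs_request import (
    ReplayWorkflowRunsRequest,
)


async def replay_runs() -> None:
    # Placeholder host and token; access_token is assumed to feed bearerAuth.
    config = Configuration(host="https://hatchet.example.invalid", access_token="<api-token>")
    async with ApiClient(config) as api_client:
        api = WorkflowRunApi(api_client)
        response = await api.workflow_run_update_replay(
            tenant="00000000-0000-0000-0000-000000000000",
            replay_workflow_runs_request=ReplayWorkflowRunsRequest(
                workflow_run_ids=["00000000-0000-0000-0000-000000000001"]  # assumed field name
            ),
        )
        print(response)


asyncio.run(replay_runs())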
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/workflow_runs_api.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/workflow_runs_api.py
new file mode 100644
index 00000000..0572380b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api/workflow_runs_api.py
@@ -0,0 +1,610 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call
+from typing_extensions import Annotated
+
+from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.models.replay_workflow_runs_request import (
+ ReplayWorkflowRunsRequest,
+)
+from hatchet_sdk.clients.rest.models.replay_workflow_runs_response import (
+ ReplayWorkflowRunsResponse,
+)
+from hatchet_sdk.clients.rest.rest import RESTResponseType
+
+
+class WorkflowRunsApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+ @validate_call
+ async def workflow_run_get_input(
+ self,
+ workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The workflow run id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> Dict[str, object]:
+ """Get workflow run input
+
+ Get the input for a workflow run.
+
+ :param workflow_run: The workflow run id (required)
+ :type workflow_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_get_input_serialize(
+ workflow_run=workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Dict[str, object]",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_run_get_input_with_http_info(
+ self,
+ workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The workflow run id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[Dict[str, object]]:
+ """Get workflow run input
+
+ Get the input for a workflow run.
+
+ :param workflow_run: The workflow run id (required)
+ :type workflow_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_get_input_serialize(
+ workflow_run=workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Dict[str, object]",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_run_get_input_without_preload_content(
+ self,
+ workflow_run: Annotated[
+ str,
+ Field(
+ min_length=36,
+ strict=True,
+ max_length=36,
+ description="The workflow run id",
+ ),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get workflow run input
+
+ Get the input for a workflow run.
+
+ :param workflow_run: The workflow run id (required)
+ :type workflow_run: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_get_input_serialize(
+ workflow_run=workflow_run,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "Dict[str, object]",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "404": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_run_get_input_serialize(
+ self,
+ workflow_run,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if workflow_run is not None:
+ _path_params["workflow-run"] = workflow_run
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+ # set the HTTP header `Accept`
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="GET",
+ resource_path="/api/v1/workflow-runs/{workflow-run}/input",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
+
+ @validate_call
+ async def workflow_run_update_replay(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ replay_workflow_runs_request: Annotated[
+ ReplayWorkflowRunsRequest,
+ Field(description="The workflow run ids to replay"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ReplayWorkflowRunsResponse:
+ """Replay workflow runs
+
+ Replays a list of workflow runs.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param replay_workflow_runs_request: The workflow run ids to replay (required)
+ :type replay_workflow_runs_request: ReplayWorkflowRunsRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_update_replay_serialize(
+ tenant=tenant,
+ replay_workflow_runs_request=replay_workflow_runs_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ReplayWorkflowRunsResponse",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ async def workflow_run_update_replay_with_http_info(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ replay_workflow_runs_request: Annotated[
+ ReplayWorkflowRunsRequest,
+ Field(description="The workflow run ids to replay"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[ReplayWorkflowRunsResponse]:
+ """Replay workflow runs
+
+ Replays a list of workflow runs.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param replay_workflow_runs_request: The workflow run ids to replay (required)
+ :type replay_workflow_runs_request: ReplayWorkflowRunsRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_update_replay_serialize(
+ tenant=tenant,
+ replay_workflow_runs_request=replay_workflow_runs_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ReplayWorkflowRunsResponse",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ await response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ async def workflow_run_update_replay_without_preload_content(
+ self,
+ tenant: Annotated[
+ str,
+ Field(
+ min_length=36, strict=True, max_length=36, description="The tenant id"
+ ),
+ ],
+ replay_workflow_runs_request: Annotated[
+ ReplayWorkflowRunsRequest,
+ Field(description="The workflow run ids to replay"),
+ ],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+ ],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Replay workflow runs
+
+ Replays a list of workflow runs.
+
+ :param tenant: The tenant id (required)
+ :type tenant: str
+ :param replay_workflow_runs_request: The workflow run ids to replay (required)
+ :type replay_workflow_runs_request: ReplayWorkflowRunsRequest
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._workflow_run_update_replay_serialize(
+ tenant=tenant,
+ replay_workflow_runs_request=replay_workflow_runs_request,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "ReplayWorkflowRunsResponse",
+ "400": "APIErrors",
+ "403": "APIErrors",
+ "429": "APIErrors",
+ }
+ response_data = await self.api_client.call_api(
+ *_param, _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+ def _workflow_run_update_replay_serialize(
+ self,
+ tenant,
+ replay_workflow_runs_request,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {}
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if tenant is not None:
+ _path_params["tenant"] = tenant
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if replay_workflow_runs_request is not None:
+ _body_params = replay_workflow_runs_request
+
+ # set the HTTP header `Accept`
+ _header_params["Accept"] = self.api_client.select_header_accept(
+ ["application/json"]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params["Content-Type"] = _content_type
+ else:
+ _default_content_type = self.api_client.select_header_content_type(
+ ["application/json"]
+ )
+ if _default_content_type is not None:
+ _header_params["Content-Type"] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+ return self.api_client.param_serialize(
+ method="POST",
+ resource_path="/api/v1/tenants/{tenant}/workflow-runs/replay",
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth,
+ )
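A short sketch (not part of the generated file) of how the three generated variants of one operation differ: the plain method returns the deserialized payload, the *_with_http_info variant wraps it in ApiResponse, and the *_without_preload_content variant hands back the raw response from the underlying HTTP client (assumed here to expose an async read()).

from hatchet_sdk.clients.rest.api.workflow_runs_api import WorkflowRunsApi


async def inspect_run_input(api: WorkflowRunsApi, run_id: str) -> None:
    # Deserialized payload only.
    data = await api.workflow_run_get_input(workflow_run=run_id)
    print(data)

    # Full ApiResponse wrapper: status code, headers, and payload.
    info = await api.workflow_run_get_input_with_http_info(workflow_run=run_id)
    print(info.status_code, info.data)

    # Raw response; the body is not read or deserialized for you.
    raw = await api.workflow_run_get_input_without_preload_content(workflow_run=run_id)
    body = await raw.read()
    print(body)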
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api_client.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api_client.py
new file mode 100644
index 00000000..76446dda
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api_client.py
@@ -0,0 +1,759 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import datetime
+import decimal
+import json
+import mimetypes
+import os
+import re
+import tempfile
+from enum import Enum
+from typing import Dict, List, Optional, Tuple, Union
+from urllib.parse import quote
+
+from dateutil.parser import parse
+from pydantic import SecretStr
+
+import hatchet_sdk.clients.rest.models
+from hatchet_sdk.clients.rest import rest
+from hatchet_sdk.clients.rest.api_response import ApiResponse
+from hatchet_sdk.clients.rest.api_response import T as ApiResponseT
+from hatchet_sdk.clients.rest.configuration import Configuration
+from hatchet_sdk.clients.rest.exceptions import (
+ ApiException,
+ ApiValueError,
+ BadRequestException,
+ ForbiddenException,
+ NotFoundException,
+ ServiceException,
+ UnauthorizedException,
+)
+
+RequestSerialized = Tuple[str, str, Dict[str, str], Optional[str], List[str]]
+
+
+class ApiClient:
+ """Generic API client for OpenAPI client library builds.
+
+ OpenAPI generic API client. This client handles the client-
+ server communication, and is invariant across implementations. Specifics of
+ the methods and models for each application are generated from the OpenAPI
+ templates.
+
+ :param configuration: Configuration object for this client
+ :param header_name: a header to pass when making calls to the API.
+ :param header_value: a header value to pass when making calls to
+ the API.
+ :param cookie: a cookie to include in the header when making calls
+ to the API
+ """
+
+ PRIMITIVE_TYPES = (float, bool, bytes, str, int)
+ NATIVE_TYPES_MAPPING = {
+ "int": int,
+ "long": int, # TODO remove as only py3 is supported?
+ "float": float,
+ "str": str,
+ "bool": bool,
+ "date": datetime.date,
+ "datetime": datetime.datetime,
+ "decimal": decimal.Decimal,
+ "object": object,
+ }
+ _pool = None
+
+ def __init__(
+ self, configuration=None, header_name=None, header_value=None, cookie=None
+ ) -> None:
+ # use default configuration if none is provided
+ if configuration is None:
+ configuration = Configuration.get_default()
+ self.configuration = configuration
+
+ self.rest_client = rest.RESTClientObject(configuration)
+ self.default_headers = {}
+ if header_name is not None:
+ self.default_headers[header_name] = header_value
+ self.cookie = cookie
+ # Set default User-Agent.
+ self.user_agent = "OpenAPI-Generator/1.0.0/python"
+ self.client_side_validation = configuration.client_side_validation
+
+ async def __aenter__(self):
+ return self
+
+ async def __aexit__(self, exc_type, exc_value, traceback):
+ await self.close()
+
+ async def close(self):
+ await self.rest_client.close()
+
+ @property
+ def user_agent(self):
+ """User agent for this API client"""
+ return self.default_headers["User-Agent"]
+
+ @user_agent.setter
+ def user_agent(self, value):
+ self.default_headers["User-Agent"] = value
+
+ def set_default_header(self, header_name, header_value):
+ self.default_headers[header_name] = header_value
+
+ _default = None
+
+ @classmethod
+ def get_default(cls):
+ """Return new instance of ApiClient.
+
+ This method returns newly created, based on default constructor,
+ object of ApiClient class or returns a copy of default
+ ApiClient.
+
+ :return: The ApiClient object.
+ """
+ if cls._default is None:
+ cls._default = ApiClient()
+ return cls._default
+
+ @classmethod
+ def set_default(cls, default):
+ """Set default instance of ApiClient.
+
+ It stores default ApiClient.
+
+ :param default: object of ApiClient.
+ """
+ cls._default = default
+
+ def param_serialize(
+ self,
+ method,
+ resource_path,
+ path_params=None,
+ query_params=None,
+ header_params=None,
+ body=None,
+ post_params=None,
+ files=None,
+ auth_settings=None,
+ collection_formats=None,
+ _host=None,
+ _request_auth=None,
+ ) -> RequestSerialized:
+ """Builds the HTTP request params needed by the request.
+ :param method: Method to call.
+ :param resource_path: Path to method endpoint.
+ :param path_params: Path parameters in the url.
+ :param query_params: Query parameters in the url.
+ :param header_params: Header parameters to be
+ placed in the request header.
+ :param body: Request body.
+ :param post_params: Request post form parameters (dict),
+ for `application/x-www-form-urlencoded`, `multipart/form-data`.
+ :param auth_settings: Auth settings names for the request (list).
+ :param files: Files to upload (dict), key -> filename, value -> filepath,
+ for `multipart/form-data`.
+ :param collection_formats: dict of collection formats for path, query,
+ header, and post parameters.
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication
+ in the spec for a single request.
+ :return: tuple of form (method, url, header_params, body, post_params)
+ """
+
+ config = self.configuration
+
+ # header parameters
+ header_params = header_params or {}
+ header_params.update(self.default_headers)
+ if self.cookie:
+ header_params["Cookie"] = self.cookie
+ if header_params:
+ header_params = self.sanitize_for_serialization(header_params)
+ header_params = dict(
+ self.parameters_to_tuples(header_params, collection_formats)
+ )
+
+ # path parameters
+ if path_params:
+ path_params = self.sanitize_for_serialization(path_params)
+ path_params = self.parameters_to_tuples(path_params, collection_formats)
+ for k, v in path_params:
+ # specified safe chars, encode everything
+ resource_path = resource_path.replace(
+ "{%s}" % k, quote(str(v), safe=config.safe_chars_for_path_param)
+ )
+
+ # post parameters
+ if post_params or files:
+ post_params = post_params if post_params else []
+ post_params = self.sanitize_for_serialization(post_params)
+ post_params = self.parameters_to_tuples(post_params, collection_formats)
+ if files:
+ post_params.extend(self.files_parameters(files))
+
+ # auth setting
+ self.update_params_for_auth(
+ header_params,
+ query_params,
+ auth_settings,
+ resource_path,
+ method,
+ body,
+ request_auth=_request_auth,
+ )
+
+ # body
+ if body:
+ body = self.sanitize_for_serialization(body)
+
+ # request url
+ if _host is None or self.configuration.ignore_operation_servers:
+ url = self.configuration.host + resource_path
+ else:
+ # use server/host defined in path or operation instead
+ url = _host + resource_path
+
+ # query parameters
+ if query_params:
+ query_params = self.sanitize_for_serialization(query_params)
+ url_query = self.parameters_to_url_query(query_params, collection_formats)
+ url += "?" + url_query
+
+ return method, url, header_params, body, post_params
+
+ async def call_api(
+ self,
+ method,
+ url,
+ header_params=None,
+ body=None,
+ post_params=None,
+ _request_timeout=None,
+ ) -> rest.RESTResponse:
+ """Makes the HTTP request (synchronous)
+ :param method: Method to call.
+ :param url: Path to method endpoint.
+ :param header_params: Header parameters to be
+ placed in the request header.
+ :param body: Request body.
+ :param post_params: Request post form parameters (dict),
+ for `application/x-www-form-urlencoded`, `multipart/form-data`.
+ :param _request_timeout: timeout setting for this request.
+ :return: RESTResponse
+ """
+
+ try:
+ # perform request and return response
+ response_data = await self.rest_client.request(
+ method,
+ url,
+ headers=header_params,
+ body=body,
+ post_params=post_params,
+ _request_timeout=_request_timeout,
+ )
+
+ except ApiException as e:
+ raise e
+
+ return response_data
+
+ def response_deserialize(
+ self,
+ response_data: rest.RESTResponse,
+ response_types_map: Optional[Dict[str, ApiResponseT]] = None,
+ ) -> ApiResponse[ApiResponseT]:
+ """Deserializes response into an object.
+ :param response_data: RESTResponse object to be deserialized.
+ :param response_types_map: dict of response types.
+ :return: ApiResponse
+ """
+
+ msg = "RESTResponse.read() must be called before passing it to response_deserialize()"
+ assert response_data.data is not None, msg
+
+ response_type = response_types_map.get(str(response_data.status), None)
+ if (
+ not response_type
+ and isinstance(response_data.status, int)
+ and 100 <= response_data.status <= 599
+ ):
+ # if not found, look for '1XX', '2XX', etc.
+ response_type = response_types_map.get(
+ str(response_data.status)[0] + "XX", None
+ )
+
+ # deserialize response data
+ response_text = None
+ return_data = None
+ try:
+ if response_type == "bytearray":
+ return_data = response_data.data
+ elif response_type == "file":
+ return_data = self.__deserialize_file(response_data)
+ elif response_type is not None:
+ match = None
+ content_type = response_data.getheader("content-type")
+ if content_type is not None:
+ match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type)
+ encoding = match.group(1) if match else "utf-8"
+ response_text = response_data.data.decode(encoding)
+ return_data = self.deserialize(
+ response_text, response_type, content_type
+ )
+ finally:
+ if not 200 <= response_data.status <= 299:
+ raise ApiException.from_response(
+ http_resp=response_data,
+ body=response_text,
+ data=return_data,
+ )
+
+ return ApiResponse(
+ status_code=response_data.status,
+ data=return_data,
+ headers=response_data.getheaders(),
+ raw_data=response_data.data,
+ )
+
+ def sanitize_for_serialization(self, obj):
+ """Builds a JSON POST object.
+
+ If obj is None, return None.
+ If obj is SecretStr, return obj.get_secret_value()
+ If obj is str, int, long, float, bool, return directly.
+ If obj is datetime.datetime, datetime.date
+ convert to string in iso8601 format.
+ If obj is decimal.Decimal return string representation.
+ If obj is list, sanitize each element in the list.
+ If obj is dict, sanitize each value in the dict.
+ If obj is OpenAPI model, return the properties dict.
+
+ :param obj: The data to serialize.
+ :return: The serialized form of data.
+ """
+ if obj is None:
+ return None
+ elif isinstance(obj, Enum):
+ return obj.value
+ elif isinstance(obj, SecretStr):
+ return obj.get_secret_value()
+ elif isinstance(obj, self.PRIMITIVE_TYPES):
+ return obj
+ elif isinstance(obj, list):
+ return [self.sanitize_for_serialization(sub_obj) for sub_obj in obj]
+ elif isinstance(obj, tuple):
+ return tuple(self.sanitize_for_serialization(sub_obj) for sub_obj in obj)
+ elif isinstance(obj, (datetime.datetime, datetime.date)):
+ return obj.isoformat()
+ elif isinstance(obj, decimal.Decimal):
+ return str(obj)
+
+ elif isinstance(obj, dict):
+ obj_dict = obj
+ else:
+ # Convert model obj to dict except
+ # attributes `openapi_types`, `attribute_map`
+ # and attributes which value is not None.
+ # Convert attribute name to json key in
+ # model definition for request.
+ if hasattr(obj, "to_dict") and callable(getattr(obj, "to_dict")):
+ obj_dict = obj.to_dict()
+ else:
+ obj_dict = obj.__dict__
+
+ return {
+ key: self.sanitize_for_serialization(val) for key, val in obj_dict.items()
+ }
+
+ def deserialize(
+ self, response_text: str, response_type: str, content_type: Optional[str]
+ ):
+ """Deserializes response into an object.
+
+ :param response_text: Response body text to be deserialized.
+ :param response_type: class literal for
+ deserialized object, or string of class name.
+ :param content_type: content type of response.
+
+ :return: deserialized object.
+ """
+
+ # fetch data from response object
+ if content_type is None:
+ try:
+ data = json.loads(response_text)
+ except ValueError:
+ data = response_text
+ elif re.match(
+ r"^application/(json|[\w!#$&.+-^_]+\+json)\s*(;|$)",
+ content_type,
+ re.IGNORECASE,
+ ):
+ if response_text == "":
+ data = ""
+ else:
+ data = json.loads(response_text)
+ elif re.match(r"^text\/[a-z.+-]+\s*(;|$)", content_type, re.IGNORECASE):
+ data = response_text
+ else:
+ raise ApiException(
+ status=0, reason="Unsupported content type: {0}".format(content_type)
+ )
+
+ return self.__deserialize(data, response_type)
+
+ def __deserialize(self, data, klass):
+ """Deserializes dict, list, str into an object.
+
+ :param data: dict, list or str.
+ :param klass: class literal, or string of class name.
+
+ :return: object.
+ """
+ if data is None:
+ return None
+
+ if isinstance(klass, str):
+ if klass.startswith("List["):
+ m = re.match(r"List\[(.*)]", klass)
+ assert m is not None, "Malformed List type definition"
+ sub_kls = m.group(1)
+ return [self.__deserialize(sub_data, sub_kls) for sub_data in data]
+
+ if klass.startswith("Dict["):
+ m = re.match(r"Dict\[([^,]*), (.*)]", klass)
+ assert m is not None, "Malformed Dict type definition"
+ sub_kls = m.group(2)
+ return {k: self.__deserialize(v, sub_kls) for k, v in data.items()}
+
+ # convert str to class
+ if klass in self.NATIVE_TYPES_MAPPING:
+ klass = self.NATIVE_TYPES_MAPPING[klass]
+ else:
+ klass = getattr(hatchet_sdk.clients.rest.models, klass)
+
+ if klass in self.PRIMITIVE_TYPES:
+ return self.__deserialize_primitive(data, klass)
+ elif klass == object:
+ return self.__deserialize_object(data)
+ elif klass == datetime.date:
+ return self.__deserialize_date(data)
+ elif klass == datetime.datetime:
+ return self.__deserialize_datetime(data)
+ elif klass == decimal.Decimal:
+ return decimal.Decimal(data)
+ elif issubclass(klass, Enum):
+ return self.__deserialize_enum(data, klass)
+ else:
+ return self.__deserialize_model(data, klass)
+
+ def parameters_to_tuples(self, params, collection_formats):
+ """Get parameters as list of tuples, formatting collections.
+
+ :param params: Parameters as dict or list of two-tuples
+ :param dict collection_formats: Parameter collection formats
+ :return: Parameters as list of tuples, collections formatted
+ """
+ new_params: List[Tuple[str, str]] = []
+ if collection_formats is None:
+ collection_formats = {}
+ for k, v in params.items() if isinstance(params, dict) else params:
+ if k in collection_formats:
+ collection_format = collection_formats[k]
+ if collection_format == "multi":
+ new_params.extend((k, value) for value in v)
+ else:
+ if collection_format == "ssv":
+ delimiter = " "
+ elif collection_format == "tsv":
+ delimiter = "\t"
+ elif collection_format == "pipes":
+ delimiter = "|"
+ else: # csv is the default
+ delimiter = ","
+ new_params.append((k, delimiter.join(str(value) for value in v)))
+ else:
+ new_params.append((k, v))
+ return new_params
+
+ def parameters_to_url_query(self, params, collection_formats):
+ """Get parameters as list of tuples, formatting collections.
+
+ :param params: Parameters as dict or list of two-tuples
+ :param dict collection_formats: Parameter collection formats
+ :return: URL query string (e.g. a=Hello%20World&b=123)
+ """
+ new_params: List[Tuple[str, str]] = []
+ if collection_formats is None:
+ collection_formats = {}
+ for k, v in params.items() if isinstance(params, dict) else params:
+ if isinstance(v, bool):
+ v = str(v).lower()
+ if isinstance(v, (int, float)):
+ v = str(v)
+ if isinstance(v, dict):
+ v = json.dumps(v)
+
+ if k in collection_formats:
+ collection_format = collection_formats[k]
+ if collection_format == "multi":
+ new_params.extend((k, str(value)) for value in v)
+ else:
+ if collection_format == "ssv":
+ delimiter = " "
+ elif collection_format == "tsv":
+ delimiter = "\t"
+ elif collection_format == "pipes":
+ delimiter = "|"
+ else: # csv is the default
+ delimiter = ","
+ new_params.append(
+ (k, delimiter.join(quote(str(value)) for value in v))
+ )
+ else:
+ new_params.append((k, quote(str(v))))
+
+ return "&".join(["=".join(map(str, item)) for item in new_params])
+
+ def files_parameters(
+ self,
+ files: Dict[str, Union[str, bytes, List[str], List[bytes], Tuple[str, bytes]]],
+ ):
+ """Builds form parameters.
+
+ :param files: File parameters.
+ :return: Form parameters with files.
+ """
+ params = []
+ for k, v in files.items():
+ if isinstance(v, str):
+ with open(v, "rb") as f:
+ filename = os.path.basename(f.name)
+ filedata = f.read()
+ elif isinstance(v, bytes):
+ filename = k
+ filedata = v
+ elif isinstance(v, tuple):
+ filename, filedata = v
+ elif isinstance(v, list):
+ for file_param in v:
+ params.extend(self.files_parameters({k: file_param}))
+ continue
+ else:
+ raise ValueError("Unsupported file value")
+ mimetype = mimetypes.guess_type(filename)[0] or "application/octet-stream"
+ params.append(tuple([k, tuple([filename, filedata, mimetype])]))
+ return params
+
+ def select_header_accept(self, accepts: List[str]) -> Optional[str]:
+ """Returns `Accept` based on an array of accepts provided.
+
+ :param accepts: List of headers.
+ :return: Accept (e.g. application/json).
+ """
+ if not accepts:
+ return None
+
+ for accept in accepts:
+ if re.search("json", accept, re.IGNORECASE):
+ return accept
+
+ return accepts[0]
+
+ def select_header_content_type(self, content_types):
+ """Returns `Content-Type` based on an array of content_types provided.
+
+ :param content_types: List of content-types.
+ :return: Content-Type (e.g. application/json).
+ """
+ if not content_types:
+ return None
+
+ for content_type in content_types:
+ if re.search("json", content_type, re.IGNORECASE):
+ return content_type
+
+ return content_types[0]
+
+ def update_params_for_auth(
+ self,
+ headers,
+ queries,
+ auth_settings,
+ resource_path,
+ method,
+ body,
+ request_auth=None,
+ ) -> None:
+ """Updates header and query params based on authentication setting.
+
+ :param headers: Header parameters dict to be updated.
+ :param queries: Query parameters tuple list to be updated.
+ :param auth_settings: Authentication setting identifiers list.
+ :param resource_path: A string representation of the HTTP request resource path.
+ :param method: A string representation of the HTTP request method.
+ :param body: An object representing the body of the HTTP request.
+ The object type is the return value of sanitize_for_serialization().
+ :param request_auth: if set, the provided settings will
+ override the token in the configuration.
+ """
+ if not auth_settings:
+ return
+
+ if request_auth:
+ self._apply_auth_params(
+ headers, queries, resource_path, method, body, request_auth
+ )
+ else:
+ for auth in auth_settings:
+ auth_setting = self.configuration.auth_settings().get(auth)
+ if auth_setting:
+ self._apply_auth_params(
+ headers, queries, resource_path, method, body, auth_setting
+ )
+
+ def _apply_auth_params(
+ self, headers, queries, resource_path, method, body, auth_setting
+ ) -> None:
+ """Updates the request parameters based on a single auth_setting
+
+ :param headers: Header parameters dict to be updated.
+ :param queries: Query parameters tuple list to be updated.
+ :param resource_path: A string representation of the HTTP request resource path.
+ :param method: A string representation of the HTTP request method.
+ :param body: An object representing the body of the HTTP request.
+ The object type is the return value of sanitize_for_serialization().
+ :param auth_setting: auth settings for the endpoint
+ """
+ if auth_setting["in"] == "cookie":
+ headers["Cookie"] = auth_setting["value"]
+ elif auth_setting["in"] == "header":
+ if auth_setting["type"] != "http-signature":
+ headers[auth_setting["key"]] = auth_setting["value"]
+ elif auth_setting["in"] == "query":
+ queries.append((auth_setting["key"], auth_setting["value"]))
+ else:
+ raise ApiValueError("Authentication token must be in `query` or `header`")
+
+ def __deserialize_file(self, response):
+ """Deserializes body to file
+
+ Saves response body into a file in a temporary folder,
+ using the filename from the `Content-Disposition` header if provided.
+
+ :param response: RESTResponse.
+ :return: file path.
+ """
+ fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
+ os.close(fd)
+ os.remove(path)
+
+ content_disposition = response.getheader("Content-Disposition")
+ if content_disposition:
+ m = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', content_disposition)
+ assert m is not None, "Unexpected 'content-disposition' header value"
+ filename = m.group(1)
+ path = os.path.join(os.path.dirname(path), filename)
+
+ with open(path, "wb") as f:
+ f.write(response.data)
+
+ return path
+
+ def __deserialize_primitive(self, data, klass):
+ """Deserializes string to primitive type.
+
+ :param data: str.
+ :param klass: class literal.
+
+ :return: int, long, float, str, bool.
+ """
+ try:
+ return klass(data)
+ except UnicodeEncodeError:
+ return str(data)
+ except TypeError:
+ return data
+
+ def __deserialize_object(self, value):
+ """Return an original value.
+
+ :return: object.
+ """
+ return value
+
+ def __deserialize_date(self, string):
+ """Deserializes string to date.
+
+ :param string: str.
+ :return: date.
+ """
+ try:
+ return parse(string).date()
+ except ImportError:
+ return string
+ except ValueError:
+ raise rest.ApiException(
+ status=0, reason="Failed to parse `{0}` as date object".format(string)
+ )
+
+ def __deserialize_datetime(self, string):
+ """Deserializes string to datetime.
+
+ The string should be in iso8601 datetime format.
+
+ :param string: str.
+ :return: datetime.
+ """
+ try:
+ return parse(string)
+ except ImportError:
+ return string
+ except ValueError:
+ raise rest.ApiException(
+ status=0,
+ reason=("Failed to parse `{0}` as datetime object".format(string)),
+ )
+
+ def __deserialize_enum(self, data, klass):
+ """Deserializes primitive type to enum.
+
+ :param data: primitive type.
+ :param klass: class literal.
+ :return: enum value.
+ """
+ try:
+ return klass(data)
+ except ValueError:
+ raise rest.ApiException(
+ status=0, reason=("Failed to parse `{0}` as `{1}`".format(data, klass))
+ )
+
+ def __deserialize_model(self, data, klass):
+ """Deserializes list or dict to model.
+
+ :param data: dict, list.
+ :param klass: class literal.
+ :return: model object.
+ """
+
+ return klass.from_dict(data)
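A small illustrative sketch (not part of the generated file) of the serialization helpers above; the expected outputs are shown as comments. It assumes the client can be constructed eagerly here — in an async application you would normally build it inside your event loop.

import datetime

from hatchet_sdk.clients.rest.api_client import ApiClient

client = ApiClient.get_default()

# Models, dates, enums and Decimals are reduced to JSON-friendly values.
print(client.sanitize_for_serialization({"since": datetime.date(2024, 1, 1), "limit": 10}))
# {'since': '2024-01-01', 'limit': 10}

# Query parameters are URL-encoded; a "multi" collection format repeats the key.
print(client.parameters_to_url_query(
    [("statuses", ["RUNNING", "FAILED"]), ("limit", 10)],
    {"statuses": "multi"},
))
# statuses=RUNNING&statuses=FAILED&limit=10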
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api_response.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api_response.py
new file mode 100644
index 00000000..ca801da0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/api_response.py
@@ -0,0 +1,22 @@
+"""API response object."""
+
+from __future__ import annotations
+
+from typing import Generic, Mapping, Optional, TypeVar
+
+from pydantic import BaseModel, Field, StrictBytes, StrictInt
+
+T = TypeVar("T")
+
+
+class ApiResponse(BaseModel, Generic[T]):
+ """
+ API response object
+ """
+
+ status_code: StrictInt = Field(description="HTTP status code")
+ headers: Optional[Mapping[str, str]] = Field(None, description="HTTP headers")
+ data: T = Field(description="Deserialized data given the data type")
+ raw_data: StrictBytes = Field(description="Raw data (HTTP response body)")
+
+ model_config = {"arbitrary_types_allowed": True}
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/configuration.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/configuration.py
new file mode 100644
index 00000000..635209d2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/configuration.py
@@ -0,0 +1,611 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import copy
+import http.client as httplib
+import logging
+import sys
+from logging import FileHandler
+from typing import Any, ClassVar, Dict, List, Literal, Optional, TypedDict
+
+import urllib3
+from typing_extensions import NotRequired, Self
+
+JSON_SCHEMA_VALIDATION_KEYWORDS = {
+ "multipleOf",
+ "maximum",
+ "exclusiveMaximum",
+ "minimum",
+ "exclusiveMinimum",
+ "maxLength",
+ "minLength",
+ "pattern",
+ "maxItems",
+ "minItems",
+}
+
+ServerVariablesT = Dict[str, str]
+
+GenericAuthSetting = TypedDict(
+ "GenericAuthSetting",
+ {
+ "type": str,
+ "in": str,
+ "key": str,
+ "value": str,
+ },
+)
+
+
+OAuth2AuthSetting = TypedDict(
+ "OAuth2AuthSetting",
+ {
+ "type": Literal["oauth2"],
+ "in": Literal["header"],
+ "key": Literal["Authorization"],
+ "value": str,
+ },
+)
+
+
+APIKeyAuthSetting = TypedDict(
+ "APIKeyAuthSetting",
+ {
+ "type": Literal["api_key"],
+ "in": str,
+ "key": str,
+ "value": Optional[str],
+ },
+)
+
+
+BasicAuthSetting = TypedDict(
+ "BasicAuthSetting",
+ {
+ "type": Literal["basic"],
+ "in": Literal["header"],
+ "key": Literal["Authorization"],
+ "value": Optional[str],
+ },
+)
+
+
+BearerFormatAuthSetting = TypedDict(
+ "BearerFormatAuthSetting",
+ {
+ "type": Literal["bearer"],
+ "in": Literal["header"],
+ "format": Literal["JWT"],
+ "key": Literal["Authorization"],
+ "value": str,
+ },
+)
+
+
+BearerAuthSetting = TypedDict(
+ "BearerAuthSetting",
+ {
+ "type": Literal["bearer"],
+ "in": Literal["header"],
+ "key": Literal["Authorization"],
+ "value": str,
+ },
+)
+
+
+HTTPSignatureAuthSetting = TypedDict(
+ "HTTPSignatureAuthSetting",
+ {
+ "type": Literal["http-signature"],
+ "in": Literal["header"],
+ "key": Literal["Authorization"],
+ "value": None,
+ },
+)
+
+
+AuthSettings = TypedDict(
+ "AuthSettings",
+ {
+ "bearerAuth": BearerAuthSetting,
+ "cookieAuth": APIKeyAuthSetting,
+ },
+ total=False,
+)
+
+
+class HostSettingVariable(TypedDict):
+ description: str
+ default_value: str
+ enum_values: List[str]
+
+
+class HostSetting(TypedDict):
+ url: str
+ description: str
+ variables: NotRequired[Dict[str, HostSettingVariable]]
+
+
+class Configuration:
+ """This class contains various settings of the API client.
+
+ :param host: Base url.
+ :param ignore_operation_servers: Boolean to ignore operation servers for the API client.
+ Config will use `host` as the base url regardless of the operation servers.
+ :param api_key: Dict to store API key(s).
+ Each entry in the dict specifies an API key.
+ The dict key is the name of the security scheme in the OAS specification.
+ The dict value is the API key secret.
+ :param api_key_prefix: Dict to store API prefix (e.g. Bearer).
+ The dict key is the name of the security scheme in the OAS specification.
+ The dict value is an API key prefix when generating the auth data.
+ :param username: Username for HTTP basic authentication.
+ :param password: Password for HTTP basic authentication.
+ :param access_token: Access token.
+ :param server_index: Index to servers configuration.
+ :param server_variables: Mapping with string values to replace variables in
+ templated server configuration. The validation of enums is performed for
+ variables with defined enum values before.
+ :param server_operation_index: Mapping from operation ID to an index to server
+ configuration.
+ :param server_operation_variables: Mapping from operation ID to a mapping with
+ string values to replace variables in templated server configuration.
+ The validation of enums is performed for variables with defined enum
+ values before.
+ :param ssl_ca_cert: str - the path to a file of concatenated CA certificates
+ in PEM format.
+ :param retries: Number of retries for API requests.
+
+ :Example:
+
+ API Key Authentication Example.
+ Given the following security scheme in the OpenAPI specification:
+ components:
+ securitySchemes:
+ cookieAuth: # name for the security scheme
+ type: apiKey
+ in: cookie
+ name: JSESSIONID # cookie name
+
+ You can programmatically set the cookie:
+
+ conf = hatchet_sdk.clients.rest.Configuration(
+ api_key={'cookieAuth': 'abc123'},
+ api_key_prefix={'cookieAuth': 'JSESSIONID'}
+ )
+
+ The following cookie will be added to the HTTP request:
+ Cookie: JSESSIONID abc123
+ """
+
+ _default: ClassVar[Optional[Self]] = None
+
+ def __init__(
+ self,
+ host: Optional[str] = None,
+ api_key: Optional[Dict[str, str]] = None,
+ api_key_prefix: Optional[Dict[str, str]] = None,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ access_token: Optional[str] = None,
+ server_index: Optional[int] = None,
+ server_variables: Optional[ServerVariablesT] = None,
+ server_operation_index: Optional[Dict[int, int]] = None,
+ server_operation_variables: Optional[Dict[int, ServerVariablesT]] = None,
+ ignore_operation_servers: bool = False,
+ ssl_ca_cert: Optional[str] = None,
+ retries: Optional[int] = None,
+ *,
+ debug: Optional[bool] = None,
+ ) -> None:
+ """Constructor"""
+ self._base_path = "http://localhost" if host is None else host
+ """Default Base url
+ """
+ self.server_index = 0 if server_index is None and host is None else server_index
+ self.server_operation_index = server_operation_index or {}
+ """Default server index
+ """
+ self.server_variables = server_variables or {}
+ self.server_operation_variables = server_operation_variables or {}
+ """Default server variables
+ """
+ self.ignore_operation_servers = ignore_operation_servers
+ """Ignore operation servers
+ """
+ self.temp_folder_path = None
+ """Temp file folder for downloading files
+ """
+ # Authentication Settings
+ self.api_key = {}
+ if api_key:
+ self.api_key = api_key
+ """dict to store API key(s)
+ """
+ self.api_key_prefix = {}
+ if api_key_prefix:
+ self.api_key_prefix = api_key_prefix
+ """dict to store API prefix (e.g. Bearer)
+ """
+ self.refresh_api_key_hook = None
+ """function hook to refresh API key if expired
+ """
+ self.username = username
+ """Username for HTTP basic authentication
+ """
+ self.password = password
+ """Password for HTTP basic authentication
+ """
+ self.access_token = access_token
+ """Access token
+ """
+ self.logger = {}
+ """Logging Settings
+ """
+ self.logger["package_logger"] = logging.getLogger("hatchet_sdk.clients.rest")
+ self.logger["urllib3_logger"] = logging.getLogger("urllib3")
+ self.logger_format = "%(asctime)s %(levelname)s %(message)s"
+ """Log format
+ """
+ self.logger_stream_handler = None
+ """Log stream handler
+ """
+ self.logger_file_handler: Optional[FileHandler] = None
+ """Log file handler
+ """
+ self.logger_file = None
+ """Debug file location
+ """
+ if debug is not None:
+ self.debug = debug
+ else:
+ self.__debug = False
+ """Debug switch
+ """
+
+ self.verify_ssl = True
+ """SSL/TLS verification
+ Set this to false to skip verifying SSL certificate when calling API
+ from https server.
+ """
+ self.ssl_ca_cert = ssl_ca_cert
+ """Set this to customize the certificate file to verify the peer.
+ """
+ self.cert_file = None
+ """client certificate file
+ """
+ self.key_file = None
+ """client key file
+ """
+ self.assert_hostname = None
+ """Set this to True/False to enable/disable SSL hostname verification.
+ """
+ self.tls_server_name = None
+ """SSL/TLS Server Name Indication (SNI)
+ Set this to the SNI value expected by the server.
+ """
+
+ self.connection_pool_maxsize = 100
+ """This value is passed to the aiohttp to limit simultaneous connections.
+ Default values is 100, None means no-limit.
+ """
+
+ self.proxy: Optional[str] = None
+ """Proxy URL
+ """
+ self.proxy_headers = None
+ """Proxy headers
+ """
+ self.safe_chars_for_path_param = ""
+ """Safe chars for path_param
+ """
+ self.retries = retries
+ """Adding retries to override urllib3 default value 3
+ """
+ # Enable client side validation
+ self.client_side_validation = True
+
+ self.socket_options = None
+ """Options to pass down to the underlying urllib3 socket
+ """
+
+ self.datetime_format = "%Y-%m-%dT%H:%M:%S.%f%z"
+ """datetime format
+ """
+
+ self.date_format = "%Y-%m-%d"
+ """date format
+ """
+
+ def __deepcopy__(self, memo: Dict[int, Any]) -> Self:
+ cls = self.__class__
+ result = cls.__new__(cls)
+ memo[id(self)] = result
+ for k, v in self.__dict__.items():
+ if k not in ("logger", "logger_file_handler"):
+ setattr(result, k, copy.deepcopy(v, memo))
+ # shallow copy of loggers
+ result.logger = copy.copy(self.logger)
+ # use setters to configure loggers
+ result.logger_file = self.logger_file
+ result.debug = self.debug
+ return result
+
+ def __setattr__(self, name: str, value: Any) -> None:
+ object.__setattr__(self, name, value)
+
+ @classmethod
+ def set_default(cls, default: Optional[Self]) -> None:
+ """Set default instance of configuration.
+
+ It stores the default configuration, which is later
+ returned by the get_default method.
+
+ :param default: object of Configuration
+ """
+ cls._default = default
+
+ @classmethod
+ def get_default_copy(cls) -> Self:
+ """Deprecated. Please use `get_default` instead.
+
+ :return: The configuration object.
+ """
+ return cls.get_default()
+
+ @classmethod
+ def get_default(cls) -> Self:
+ """Return the default configuration.
+
+ Returns the stored default Configuration instance, creating one with
+ the default constructor if no default has been set.
+
+ :return: The configuration object.
+ """
+ if cls._default is None:
+ cls._default = cls()
+ return cls._default
+
+ @property
+ def logger_file(self) -> Optional[str]:
+ """The logger file.
+
+ If the logger_file is None, then add stream handler and remove file
+ handler. Otherwise, add file handler and remove stream handler.
+
+ :param value: The logger_file path.
+ :type: str
+ """
+ return self.__logger_file
+
+ @logger_file.setter
+ def logger_file(self, value: Optional[str]) -> None:
+ """The logger file.
+
+ If the logger_file is None, then add stream handler and remove file
+ handler. Otherwise, add file handler and remove stream handler.
+
+ :param value: The logger_file path.
+ :type: str
+ """
+ self.__logger_file = value
+ if self.__logger_file:
+ # If set logging file,
+ # then add file handler and remove stream handler.
+ self.logger_file_handler = logging.FileHandler(self.__logger_file)
+ self.logger_file_handler.setFormatter(self.logger_formatter)
+ for _, logger in self.logger.items():
+ logger.addHandler(self.logger_file_handler)
+
+ @property
+ def debug(self) -> bool:
+ """Debug status
+
+ :param value: The debug status, True or False.
+ :type: bool
+ """
+ return self.__debug
+
+ @debug.setter
+ def debug(self, value: bool) -> None:
+ """Debug status
+
+ :param value: The debug status, True or False.
+ :type: bool
+ """
+ self.__debug = value
+ if self.__debug:
+ # if debug status is True, turn on debug logging
+ for _, logger in self.logger.items():
+ logger.setLevel(logging.DEBUG)
+ # turn on httplib debug
+ httplib.HTTPConnection.debuglevel = 1
+ else:
+ # if debug status is False, turn off debug logging,
+ # setting log level to default `logging.WARNING`
+ for _, logger in self.logger.items():
+ logger.setLevel(logging.WARNING)
+ # turn off httplib debug
+ httplib.HTTPConnection.debuglevel = 0
+
+ @property
+ def logger_format(self) -> str:
+ """The logger format.
+
+ The logger_formatter is updated whenever logger_format is set.
+
+ :param value: The format string.
+ :type: str
+ """
+ return self.__logger_format
+
+ @logger_format.setter
+ def logger_format(self, value: str) -> None:
+ """The logger format.
+
+ The logger_formatter is updated whenever logger_format is set.
+
+ :param value: The format string.
+ :type: str
+ """
+ self.__logger_format = value
+ self.logger_formatter = logging.Formatter(self.__logger_format)
+
+ def get_api_key_with_prefix(
+ self, identifier: str, alias: Optional[str] = None
+ ) -> Optional[str]:
+ """Gets API key (with prefix if set).
+
+ :param identifier: The identifier of apiKey.
+ :param alias: The alternative identifier of apiKey.
+ :return: The token for api key authentication.
+ """
+ if self.refresh_api_key_hook is not None:
+ self.refresh_api_key_hook(self)
+ key = self.api_key.get(
+ identifier, self.api_key.get(alias) if alias is not None else None
+ )
+ if key:
+ prefix = self.api_key_prefix.get(identifier)
+ if prefix:
+ return "%s %s" % (prefix, key)
+ else:
+ return key
+
+ return None
+
+ def get_basic_auth_token(self) -> Optional[str]:
+ """Gets HTTP basic authentication header (string).
+
+ :return: The token for basic HTTP authentication.
+ """
+ username = ""
+ if self.username is not None:
+ username = self.username
+ password = ""
+ if self.password is not None:
+ password = self.password
+ return urllib3.util.make_headers(basic_auth=username + ":" + password).get(
+ "authorization"
+ )
+
+ def auth_settings(self) -> AuthSettings:
+ """Gets Auth Settings dict for api client.
+
+ :return: The Auth Settings information dict.
+ """
+ auth: AuthSettings = {}
+ if self.access_token is not None:
+ auth["bearerAuth"] = {
+ "type": "bearer",
+ "in": "header",
+ "key": "Authorization",
+ "value": "Bearer " + self.access_token,
+ }
+ if "cookieAuth" in self.api_key:
+ auth["cookieAuth"] = {
+ "type": "api_key",
+ "in": "cookie",
+ "key": "hatchet",
+ "value": self.get_api_key_with_prefix(
+ "cookieAuth",
+ ),
+ }
+ return auth
+
+ def to_debug_report(self) -> str:
+ """Gets the essential information for debugging.
+
+ :return: The report for debugging.
+ """
+ return (
+ "Python SDK Debug Report:\n"
+ "OS: {env}\n"
+ "Python Version: {pyversion}\n"
+ "Version of the API: 1.0.0\n"
+ "SDK Package Version: 1.0.0".format(env=sys.platform, pyversion=sys.version)
+ )
+
+ def get_host_settings(self) -> List[HostSetting]:
+ """Gets an array of host settings
+
+ :return: An array of host settings
+ """
+ return [
+ {
+ "url": "",
+ "description": "No description provided",
+ }
+ ]
+
+ def get_host_from_settings(
+ self,
+ index: Optional[int],
+ variables: Optional[ServerVariablesT] = None,
+ servers: Optional[List[HostSetting]] = None,
+ ) -> str:
+ """Gets host URL based on the index and variables
+ :param index: array index of the host settings
+ :param variables: mapping of variable names to their values
+ :param servers: an array of host settings or None
+ :return: URL based on host settings
+ """
+ if index is None:
+ return self._base_path
+
+ variables = {} if variables is None else variables
+ servers = self.get_host_settings() if servers is None else servers
+
+ try:
+ server = servers[index]
+ except IndexError:
+ raise ValueError(
+ "Invalid index {0} when selecting the host settings. "
+ "Must be less than {1}".format(index, len(servers))
+ )
+
+ url = server["url"]
+
+ # go through variables and replace placeholders
+ for variable_name, variable in server.get("variables", {}).items():
+ used_value = variables.get(variable_name, variable["default_value"])
+
+ if "enum_values" in variable and used_value not in variable["enum_values"]:
+ raise ValueError(
+ "The variable `{0}` in the host URL has invalid value "
+ "{1}. Must be {2}.".format(
+ variable_name, used_value, variable["enum_values"]
+ )
+ )
+
+ url = url.replace("{" + variable_name + "}", used_value)
+
+ return url
+
+ @property
+ def host(self) -> str:
+ """Return generated host."""
+ return self.get_host_from_settings(
+ self.server_index, variables=self.server_variables
+ )
+
+ @host.setter
+ def host(self, value: str) -> None:
+ """Fix base path."""
+ self._base_path = value
+ self.server_index = None
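
A minimal usage sketch of the Configuration class above (not part of the generated diff). The host and token values are placeholders, and only attributes and methods defined in this file are used:

from hatchet_sdk.clients.rest.configuration import Configuration

# Placeholder host and token -- assumptions for illustration only.
conf = Configuration(
    host="https://hatchet.example.com",
    access_token="my-api-token",
)

# auth_settings() builds the bearerAuth entry from access_token.
print(conf.auth_settings()["bearerAuth"]["value"])  # "Bearer my-api-token"

# Because an explicit host was given, server_index is None and the
# host property falls back to the configured base path.
print(conf.host)  # "https://hatchet.example.com"
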
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/exceptions.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/exceptions.py
new file mode 100644
index 00000000..b41ac1d2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/exceptions.py
@@ -0,0 +1,200 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+from typing import Any, Optional
+
+from typing_extensions import Self
+
+
+class OpenApiException(Exception):
+ """The base exception class for all OpenAPIExceptions"""
+
+
+class ApiTypeError(OpenApiException, TypeError):
+ def __init__(
+ self, msg, path_to_item=None, valid_classes=None, key_type=None
+ ) -> None:
+ """Raises an exception for TypeErrors
+
+ Args:
+ msg (str): the exception message
+
+ Keyword Args:
+ path_to_item (list): a list of keys and indices to get to the
+ current_item
+ None if unset
+ valid_classes (tuple): the primitive classes that current item
+ should be an instance of
+ None if unset
+ key_type (bool): True if the failing item is a key in a dict,
+ False if it is a value in a dict or an item in a list,
+ None if unset
+ """
+ self.path_to_item = path_to_item
+ self.valid_classes = valid_classes
+ self.key_type = key_type
+ full_msg = msg
+ if path_to_item:
+ full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
+ super(ApiTypeError, self).__init__(full_msg)
+
+
+class ApiValueError(OpenApiException, ValueError):
+ def __init__(self, msg, path_to_item=None) -> None:
+ """
+ Args:
+ msg (str): the exception message
+
+ Keyword Args:
+ path_to_item (list): the path to the exception in the
+ received_data dict. None if unset
+ """
+
+ self.path_to_item = path_to_item
+ full_msg = msg
+ if path_to_item:
+ full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
+ super(ApiValueError, self).__init__(full_msg)
+
+
+class ApiAttributeError(OpenApiException, AttributeError):
+ def __init__(self, msg, path_to_item=None) -> None:
+ """
+ Raised when an attribute reference or assignment fails.
+
+ Args:
+ msg (str): the exception message
+
+ Keyword Args:
+ path_to_item (None/list): the path to the exception in the
+ received_data dict
+ """
+ self.path_to_item = path_to_item
+ full_msg = msg
+ if path_to_item:
+ full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
+ super(ApiAttributeError, self).__init__(full_msg)
+
+
+class ApiKeyError(OpenApiException, KeyError):
+ def __init__(self, msg, path_to_item=None) -> None:
+ """
+ Args:
+ msg (str): the exception message
+
+ Keyword Args:
+ path_to_item (None/list): the path to the exception in the
+ received_data dict
+ """
+ self.path_to_item = path_to_item
+ full_msg = msg
+ if path_to_item:
+ full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
+ super(ApiKeyError, self).__init__(full_msg)
+
+
+class ApiException(OpenApiException):
+
+ def __init__(
+ self,
+ status=None,
+ reason=None,
+ http_resp=None,
+ *,
+ body: Optional[str] = None,
+ data: Optional[Any] = None,
+ ) -> None:
+ self.status = status
+ self.reason = reason
+ self.body = body
+ self.data = data
+ self.headers = None
+
+ if http_resp:
+ if self.status is None:
+ self.status = http_resp.status
+ if self.reason is None:
+ self.reason = http_resp.reason
+ if self.body is None:
+ try:
+ self.body = http_resp.data.decode("utf-8")
+ except Exception:
+ pass
+ self.headers = http_resp.getheaders()
+
+ @classmethod
+ def from_response(
+ cls,
+ *,
+ http_resp,
+ body: Optional[str],
+ data: Optional[Any],
+ ) -> Self:
+ if http_resp.status == 400:
+ raise BadRequestException(http_resp=http_resp, body=body, data=data)
+
+ if http_resp.status == 401:
+ raise UnauthorizedException(http_resp=http_resp, body=body, data=data)
+
+ if http_resp.status == 403:
+ raise ForbiddenException(http_resp=http_resp, body=body, data=data)
+
+ if http_resp.status == 404:
+ raise NotFoundException(http_resp=http_resp, body=body, data=data)
+
+ if 500 <= http_resp.status <= 599:
+ raise ServiceException(http_resp=http_resp, body=body, data=data)
+ raise ApiException(http_resp=http_resp, body=body, data=data)
+
+ def __str__(self):
+ """Custom error messages for exception"""
+ error_message = "({0})\n" "Reason: {1}\n".format(self.status, self.reason)
+ if self.headers:
+ error_message += "HTTP response headers: {0}\n".format(self.headers)
+
+ if self.data or self.body:
+ error_message += "HTTP response body: {0}\n".format(self.data or self.body)
+
+ return error_message
+
+
+class BadRequestException(ApiException):
+ pass
+
+
+class NotFoundException(ApiException):
+ pass
+
+
+class UnauthorizedException(ApiException):
+ pass
+
+
+class ForbiddenException(ApiException):
+ pass
+
+
+class ServiceException(ApiException):
+ pass
+
+
+def render_path(path_to_item):
+ """Returns a string representation of a path"""
+ result = ""
+ for pth in path_to_item:
+ if isinstance(pth, int):
+ result += "[{0}]".format(pth)
+ else:
+ result += "['{0}']".format(pth)
+ return result
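
A hedged sketch of how the exception hierarchy above is typically consumed; `call_with_handling` and its `do_request` argument are hypothetical and stand in for any callable that performs a REST call, they are not part of the SDK:

from hatchet_sdk.clients.rest.exceptions import (
    ApiException,
    NotFoundException,
    UnauthorizedException,
)

def call_with_handling(do_request):
    """do_request: any zero-argument callable that performs a REST call."""
    try:
        return do_request()
    except NotFoundException:
        # ApiException.from_response maps HTTP 404 to this subclass.
        return None
    except UnauthorizedException as exc:
        raise RuntimeError("check the API token") from exc
    except ApiException as exc:
        # Any other non-2xx status (400, 403, 5xx, ...).
        print(exc.status, exc.reason, exc.body)
        raise
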
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/__init__.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/__init__.py
new file mode 100644
index 00000000..386c4c03
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/__init__.py
@@ -0,0 +1,260 @@
+# coding: utf-8
+
+# flake8: noqa
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from hatchet_sdk.clients.rest.models.accept_invite_request import AcceptInviteRequest
+
+# import models into model package
+from hatchet_sdk.clients.rest.models.api_error import APIError
+from hatchet_sdk.clients.rest.models.api_errors import APIErrors
+from hatchet_sdk.clients.rest.models.api_meta import APIMeta
+from hatchet_sdk.clients.rest.models.api_meta_auth import APIMetaAuth
+from hatchet_sdk.clients.rest.models.api_meta_integration import APIMetaIntegration
+from hatchet_sdk.clients.rest.models.api_meta_posthog import APIMetaPosthog
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.api_token import APIToken
+from hatchet_sdk.clients.rest.models.bulk_create_event_request import (
+ BulkCreateEventRequest,
+)
+from hatchet_sdk.clients.rest.models.cancel_event_request import CancelEventRequest
+from hatchet_sdk.clients.rest.models.concurrency_limit_strategy import (
+ ConcurrencyLimitStrategy,
+)
+from hatchet_sdk.clients.rest.models.create_api_token_request import (
+ CreateAPITokenRequest,
+)
+from hatchet_sdk.clients.rest.models.create_api_token_response import (
+ CreateAPITokenResponse,
+)
+from hatchet_sdk.clients.rest.models.create_cron_workflow_trigger_request import (
+ CreateCronWorkflowTriggerRequest,
+)
+from hatchet_sdk.clients.rest.models.create_event_request import CreateEventRequest
+from hatchet_sdk.clients.rest.models.create_pull_request_from_step_run import (
+ CreatePullRequestFromStepRun,
+)
+from hatchet_sdk.clients.rest.models.create_sns_integration_request import (
+ CreateSNSIntegrationRequest,
+)
+from hatchet_sdk.clients.rest.models.create_tenant_alert_email_group_request import (
+ CreateTenantAlertEmailGroupRequest,
+)
+from hatchet_sdk.clients.rest.models.create_tenant_invite_request import (
+ CreateTenantInviteRequest,
+)
+from hatchet_sdk.clients.rest.models.create_tenant_request import CreateTenantRequest
+from hatchet_sdk.clients.rest.models.cron_workflows import CronWorkflows
+from hatchet_sdk.clients.rest.models.cron_workflows_list import CronWorkflowsList
+from hatchet_sdk.clients.rest.models.cron_workflows_method import CronWorkflowsMethod
+from hatchet_sdk.clients.rest.models.cron_workflows_order_by_field import (
+ CronWorkflowsOrderByField,
+)
+from hatchet_sdk.clients.rest.models.event import Event
+from hatchet_sdk.clients.rest.models.event_data import EventData
+from hatchet_sdk.clients.rest.models.event_key_list import EventKeyList
+from hatchet_sdk.clients.rest.models.event_list import EventList
+from hatchet_sdk.clients.rest.models.event_order_by_direction import (
+ EventOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.event_order_by_field import EventOrderByField
+from hatchet_sdk.clients.rest.models.event_update_cancel200_response import (
+ EventUpdateCancel200Response,
+)
+from hatchet_sdk.clients.rest.models.event_workflow_run_summary import (
+ EventWorkflowRunSummary,
+)
+from hatchet_sdk.clients.rest.models.events import Events
+from hatchet_sdk.clients.rest.models.get_step_run_diff_response import (
+ GetStepRunDiffResponse,
+)
+from hatchet_sdk.clients.rest.models.info_get_version200_response import (
+ InfoGetVersion200Response,
+)
+from hatchet_sdk.clients.rest.models.job import Job
+from hatchet_sdk.clients.rest.models.job_run import JobRun
+from hatchet_sdk.clients.rest.models.job_run_status import JobRunStatus
+from hatchet_sdk.clients.rest.models.list_api_tokens_response import (
+ ListAPITokensResponse,
+)
+from hatchet_sdk.clients.rest.models.list_pull_requests_response import (
+ ListPullRequestsResponse,
+)
+from hatchet_sdk.clients.rest.models.list_slack_webhooks import ListSlackWebhooks
+from hatchet_sdk.clients.rest.models.list_sns_integrations import ListSNSIntegrations
+from hatchet_sdk.clients.rest.models.log_line import LogLine
+from hatchet_sdk.clients.rest.models.log_line_level import LogLineLevel
+from hatchet_sdk.clients.rest.models.log_line_list import LogLineList
+from hatchet_sdk.clients.rest.models.log_line_order_by_direction import (
+ LogLineOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.log_line_order_by_field import LogLineOrderByField
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.pull_request import PullRequest
+from hatchet_sdk.clients.rest.models.pull_request_state import PullRequestState
+from hatchet_sdk.clients.rest.models.queue_metrics import QueueMetrics
+from hatchet_sdk.clients.rest.models.rate_limit import RateLimit
+from hatchet_sdk.clients.rest.models.rate_limit_list import RateLimitList
+from hatchet_sdk.clients.rest.models.rate_limit_order_by_direction import (
+ RateLimitOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.rate_limit_order_by_field import (
+ RateLimitOrderByField,
+)
+from hatchet_sdk.clients.rest.models.recent_step_runs import RecentStepRuns
+from hatchet_sdk.clients.rest.models.reject_invite_request import RejectInviteRequest
+from hatchet_sdk.clients.rest.models.replay_event_request import ReplayEventRequest
+from hatchet_sdk.clients.rest.models.replay_workflow_runs_request import (
+ ReplayWorkflowRunsRequest,
+)
+from hatchet_sdk.clients.rest.models.replay_workflow_runs_response import (
+ ReplayWorkflowRunsResponse,
+)
+from hatchet_sdk.clients.rest.models.rerun_step_run_request import RerunStepRunRequest
+from hatchet_sdk.clients.rest.models.schedule_workflow_run_request import (
+ ScheduleWorkflowRunRequest,
+)
+from hatchet_sdk.clients.rest.models.scheduled_run_status import ScheduledRunStatus
+from hatchet_sdk.clients.rest.models.scheduled_workflows import ScheduledWorkflows
+from hatchet_sdk.clients.rest.models.scheduled_workflows_list import (
+ ScheduledWorkflowsList,
+)
+from hatchet_sdk.clients.rest.models.scheduled_workflows_method import (
+ ScheduledWorkflowsMethod,
+)
+from hatchet_sdk.clients.rest.models.scheduled_workflows_order_by_field import (
+ ScheduledWorkflowsOrderByField,
+)
+from hatchet_sdk.clients.rest.models.semaphore_slots import SemaphoreSlots
+from hatchet_sdk.clients.rest.models.slack_webhook import SlackWebhook
+from hatchet_sdk.clients.rest.models.sns_integration import SNSIntegration
+from hatchet_sdk.clients.rest.models.step import Step
+from hatchet_sdk.clients.rest.models.step_run import StepRun
+from hatchet_sdk.clients.rest.models.step_run_archive import StepRunArchive
+from hatchet_sdk.clients.rest.models.step_run_archive_list import StepRunArchiveList
+from hatchet_sdk.clients.rest.models.step_run_diff import StepRunDiff
+from hatchet_sdk.clients.rest.models.step_run_event import StepRunEvent
+from hatchet_sdk.clients.rest.models.step_run_event_list import StepRunEventList
+from hatchet_sdk.clients.rest.models.step_run_event_reason import StepRunEventReason
+from hatchet_sdk.clients.rest.models.step_run_event_severity import StepRunEventSeverity
+from hatchet_sdk.clients.rest.models.step_run_status import StepRunStatus
+from hatchet_sdk.clients.rest.models.tenant import Tenant
+from hatchet_sdk.clients.rest.models.tenant_alert_email_group import (
+ TenantAlertEmailGroup,
+)
+from hatchet_sdk.clients.rest.models.tenant_alert_email_group_list import (
+ TenantAlertEmailGroupList,
+)
+from hatchet_sdk.clients.rest.models.tenant_alerting_settings import (
+ TenantAlertingSettings,
+)
+from hatchet_sdk.clients.rest.models.tenant_invite import TenantInvite
+from hatchet_sdk.clients.rest.models.tenant_invite_list import TenantInviteList
+from hatchet_sdk.clients.rest.models.tenant_list import TenantList
+from hatchet_sdk.clients.rest.models.tenant_member import TenantMember
+from hatchet_sdk.clients.rest.models.tenant_member_list import TenantMemberList
+from hatchet_sdk.clients.rest.models.tenant_member_role import TenantMemberRole
+from hatchet_sdk.clients.rest.models.tenant_queue_metrics import TenantQueueMetrics
+from hatchet_sdk.clients.rest.models.tenant_resource import TenantResource
+from hatchet_sdk.clients.rest.models.tenant_resource_limit import TenantResourceLimit
+from hatchet_sdk.clients.rest.models.tenant_resource_policy import TenantResourcePolicy
+from hatchet_sdk.clients.rest.models.tenant_step_run_queue_metrics import (
+ TenantStepRunQueueMetrics,
+)
+from hatchet_sdk.clients.rest.models.trigger_workflow_run_request import (
+ TriggerWorkflowRunRequest,
+)
+from hatchet_sdk.clients.rest.models.update_tenant_alert_email_group_request import (
+ UpdateTenantAlertEmailGroupRequest,
+)
+from hatchet_sdk.clients.rest.models.update_tenant_invite_request import (
+ UpdateTenantInviteRequest,
+)
+from hatchet_sdk.clients.rest.models.update_tenant_request import UpdateTenantRequest
+from hatchet_sdk.clients.rest.models.update_worker_request import UpdateWorkerRequest
+from hatchet_sdk.clients.rest.models.user import User
+from hatchet_sdk.clients.rest.models.user_change_password_request import (
+ UserChangePasswordRequest,
+)
+from hatchet_sdk.clients.rest.models.user_login_request import UserLoginRequest
+from hatchet_sdk.clients.rest.models.user_register_request import UserRegisterRequest
+from hatchet_sdk.clients.rest.models.user_tenant_memberships_list import (
+ UserTenantMembershipsList,
+)
+from hatchet_sdk.clients.rest.models.user_tenant_public import UserTenantPublic
+from hatchet_sdk.clients.rest.models.webhook_worker import WebhookWorker
+from hatchet_sdk.clients.rest.models.webhook_worker_create_request import (
+ WebhookWorkerCreateRequest,
+)
+from hatchet_sdk.clients.rest.models.webhook_worker_create_response import (
+ WebhookWorkerCreateResponse,
+)
+from hatchet_sdk.clients.rest.models.webhook_worker_created import WebhookWorkerCreated
+from hatchet_sdk.clients.rest.models.webhook_worker_list_response import (
+ WebhookWorkerListResponse,
+)
+from hatchet_sdk.clients.rest.models.webhook_worker_request import WebhookWorkerRequest
+from hatchet_sdk.clients.rest.models.webhook_worker_request_list_response import (
+ WebhookWorkerRequestListResponse,
+)
+from hatchet_sdk.clients.rest.models.webhook_worker_request_method import (
+ WebhookWorkerRequestMethod,
+)
+from hatchet_sdk.clients.rest.models.worker import Worker
+from hatchet_sdk.clients.rest.models.worker_label import WorkerLabel
+from hatchet_sdk.clients.rest.models.worker_list import WorkerList
+from hatchet_sdk.clients.rest.models.worker_runtime_info import WorkerRuntimeInfo
+from hatchet_sdk.clients.rest.models.worker_runtime_sdks import WorkerRuntimeSDKs
+from hatchet_sdk.clients.rest.models.worker_type import WorkerType
+from hatchet_sdk.clients.rest.models.workflow import Workflow
+from hatchet_sdk.clients.rest.models.workflow_concurrency import WorkflowConcurrency
+from hatchet_sdk.clients.rest.models.workflow_kind import WorkflowKind
+from hatchet_sdk.clients.rest.models.workflow_list import WorkflowList
+from hatchet_sdk.clients.rest.models.workflow_metrics import WorkflowMetrics
+from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun
+from hatchet_sdk.clients.rest.models.workflow_run_list import WorkflowRunList
+from hatchet_sdk.clients.rest.models.workflow_run_order_by_direction import (
+ WorkflowRunOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.workflow_run_order_by_field import (
+ WorkflowRunOrderByField,
+)
+from hatchet_sdk.clients.rest.models.workflow_run_shape import WorkflowRunShape
+from hatchet_sdk.clients.rest.models.workflow_run_status import WorkflowRunStatus
+from hatchet_sdk.clients.rest.models.workflow_run_triggered_by import (
+ WorkflowRunTriggeredBy,
+)
+from hatchet_sdk.clients.rest.models.workflow_runs_cancel_request import (
+ WorkflowRunsCancelRequest,
+)
+from hatchet_sdk.clients.rest.models.workflow_runs_metrics import WorkflowRunsMetrics
+from hatchet_sdk.clients.rest.models.workflow_runs_metrics_counts import (
+ WorkflowRunsMetricsCounts,
+)
+from hatchet_sdk.clients.rest.models.workflow_tag import WorkflowTag
+from hatchet_sdk.clients.rest.models.workflow_trigger_cron_ref import (
+ WorkflowTriggerCronRef,
+)
+from hatchet_sdk.clients.rest.models.workflow_trigger_event_ref import (
+ WorkflowTriggerEventRef,
+)
+from hatchet_sdk.clients.rest.models.workflow_triggers import WorkflowTriggers
+from hatchet_sdk.clients.rest.models.workflow_update_request import (
+ WorkflowUpdateRequest,
+)
+from hatchet_sdk.clients.rest.models.workflow_version import WorkflowVersion
+from hatchet_sdk.clients.rest.models.workflow_version_definition import (
+ WorkflowVersionDefinition,
+)
+from hatchet_sdk.clients.rest.models.workflow_version_meta import WorkflowVersionMeta
+from hatchet_sdk.clients.rest.models.workflow_workers_count import WorkflowWorkersCount
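
Because every generated model is re-exported here, callers can import from the package root instead of the per-module paths; a small sketch using only classes shown in this diff:

from hatchet_sdk.clients.rest import models
from hatchet_sdk.clients.rest.models.api_meta_auth import APIMetaAuth

assert models.APIMetaAuth is APIMetaAuth  # same class, flat re-export

# Illustrative values only.
auth = APIMetaAuth(schemes=["basic", "cookie", "bearer"])
print(auth.to_json())  # {"schemes": ["basic", "cookie", "bearer"]}
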
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/accept_invite_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/accept_invite_request.py
new file mode 100644
index 00000000..241bff8a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/accept_invite_request.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Annotated, Self
+
+
+class AcceptInviteRequest(BaseModel):
+ """
+ AcceptInviteRequest
+ """ # noqa: E501
+
+ invite: Annotated[str, Field(min_length=36, strict=True, max_length=36)]
+ __properties: ClassVar[List[str]] = ["invite"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of AcceptInviteRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of AcceptInviteRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"invite": obj.get("invite")})
+ return _obj
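
The invite field above is constrained to exactly 36 characters (a UUID string); a round-trip sketch with a made-up UUID:

from hatchet_sdk.clients.rest.models.accept_invite_request import AcceptInviteRequest

req = AcceptInviteRequest(invite="123e4567-e89b-12d3-a456-426614174000")  # 36 chars
assert AcceptInviteRequest.from_json(req.to_json()) == req

# Anything that is not exactly 36 characters fails pydantic validation:
# AcceptInviteRequest(invite="too-short")  -> ValidationError
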
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_error.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_error.py
new file mode 100644
index 00000000..64edc80f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_error.py
@@ -0,0 +1,102 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
+from typing_extensions import Self
+
+
+class APIError(BaseModel):
+ """
+ APIError
+ """ # noqa: E501
+
+ code: Optional[StrictInt] = Field(
+ default=None, description="a custom Hatchet error code"
+ )
+ var_field: Optional[StrictStr] = Field(
+ default=None,
+ description="the field that this error is associated with, if applicable",
+ alias="field",
+ )
+ description: StrictStr = Field(description="a description for this error")
+ docs_link: Optional[StrictStr] = Field(
+ default=None,
+ description="a link to the documentation for this error, if it exists",
+ )
+ __properties: ClassVar[List[str]] = ["code", "field", "description", "docs_link"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of APIError from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of APIError from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "code": obj.get("code"),
+ "field": obj.get("field"),
+ "description": obj.get("description"),
+ "docs_link": obj.get("docs_link"),
+ }
+ )
+ return _obj
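
Note the alias: the Python attribute is `var_field`, but it serializes as "field" on the wire. A short sketch with made-up values:

from hatchet_sdk.clients.rest.models.api_error import APIError

err = APIError.from_dict(
    {"description": "invalid input", "field": "name", "code": 1400}
)
print(err.var_field)  # "name" -- populated through the "field" alias
print(err.to_dict())  # {'code': 1400, 'field': 'name', 'description': 'invalid input'}
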
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_errors.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_errors.py
new file mode 100644
index 00000000..e4dfed11
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_errors.py
@@ -0,0 +1,100 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_error import APIError
+
+
+class APIErrors(BaseModel):
+ """
+ APIErrors
+ """ # noqa: E501
+
+ errors: List[APIError]
+ __properties: ClassVar[List[str]] = ["errors"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of APIErrors from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of each item in errors (list)
+ _items = []
+ if self.errors:
+ for _item_errors in self.errors:
+ if _item_errors:
+ _items.append(_item_errors.to_dict())
+ _dict["errors"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of APIErrors from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "errors": (
+ [APIError.from_dict(_item) for _item in obj["errors"]]
+ if obj.get("errors") is not None
+ else None
+ )
+ }
+ )
+ return _obj
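
to_dict/from_dict above recurse into the nested APIError items; a sketch parsing a made-up error body:

from hatchet_sdk.clients.rest.models.api_errors import APIErrors

payload = '{"errors": [{"description": "tenant not found"}]}'
errs = APIErrors.from_json(payload)
print(errs.errors[0].description)  # "tenant not found"
print(errs.to_dict())              # each nested APIError serialized via its own to_dict()
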
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta.py
new file mode 100644
index 00000000..93c17f05
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta.py
@@ -0,0 +1,144 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_meta_auth import APIMetaAuth
+from hatchet_sdk.clients.rest.models.api_meta_posthog import APIMetaPosthog
+
+
+class APIMeta(BaseModel):
+ """
+ APIMeta
+ """ # noqa: E501
+
+ auth: Optional[APIMetaAuth] = None
+ pylon_app_id: Optional[StrictStr] = Field(
+ default=None,
+ description="the Pylon app ID for usepylon.com chat support",
+ alias="pylonAppId",
+ )
+ posthog: Optional[APIMetaPosthog] = None
+ allow_signup: Optional[StrictBool] = Field(
+ default=None,
+ description="whether or not users can sign up for this instance",
+ alias="allowSignup",
+ )
+ allow_invites: Optional[StrictBool] = Field(
+ default=None,
+ description="whether or not users can invite other users to this instance",
+ alias="allowInvites",
+ )
+ allow_create_tenant: Optional[StrictBool] = Field(
+ default=None,
+ description="whether or not users can create new tenants",
+ alias="allowCreateTenant",
+ )
+ allow_change_password: Optional[StrictBool] = Field(
+ default=None,
+ description="whether or not users can change their password",
+ alias="allowChangePassword",
+ )
+ __properties: ClassVar[List[str]] = [
+ "auth",
+ "pylonAppId",
+ "posthog",
+ "allowSignup",
+ "allowInvites",
+ "allowCreateTenant",
+ "allowChangePassword",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of APIMeta from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of auth
+ if self.auth:
+ _dict["auth"] = self.auth.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of posthog
+ if self.posthog:
+ _dict["posthog"] = self.posthog.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of APIMeta from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "auth": (
+ APIMetaAuth.from_dict(obj["auth"])
+ if obj.get("auth") is not None
+ else None
+ ),
+ "pylonAppId": obj.get("pylonAppId"),
+ "posthog": (
+ APIMetaPosthog.from_dict(obj["posthog"])
+ if obj.get("posthog") is not None
+ else None
+ ),
+ "allowSignup": obj.get("allowSignup"),
+ "allowInvites": obj.get("allowInvites"),
+ "allowCreateTenant": obj.get("allowCreateTenant"),
+ "allowChangePassword": obj.get("allowChangePassword"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta_auth.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta_auth.py
new file mode 100644
index 00000000..5ca29092
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta_auth.py
@@ -0,0 +1,85 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class APIMetaAuth(BaseModel):
+ """
+ APIMetaAuth
+ """ # noqa: E501
+
+ schemes: Optional[List[StrictStr]] = Field(
+ default=None, description="the supported types of authentication"
+ )
+ __properties: ClassVar[List[str]] = ["schemes"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of APIMetaAuth from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of APIMetaAuth from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"schemes": obj.get("schemes")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta_integration.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta_integration.py
new file mode 100644
index 00000000..f4f361c9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta_integration.py
@@ -0,0 +1,88 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
+from typing_extensions import Self
+
+
+class APIMetaIntegration(BaseModel):
+ """
+ APIMetaIntegration
+ """ # noqa: E501
+
+ name: StrictStr = Field(description="the name of the integration")
+ enabled: StrictBool = Field(
+ description="whether this integration is enabled on the instance"
+ )
+ __properties: ClassVar[List[str]] = ["name", "enabled"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of APIMetaIntegration from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of APIMetaIntegration from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {"name": obj.get("name"), "enabled": obj.get("enabled")}
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta_posthog.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta_posthog.py
new file mode 100644
index 00000000..da6052c2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_meta_posthog.py
@@ -0,0 +1,90 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class APIMetaPosthog(BaseModel):
+ """
+ APIMetaPosthog
+ """ # noqa: E501
+
+ api_key: Optional[StrictStr] = Field(
+ default=None, description="the PostHog API key", alias="apiKey"
+ )
+ api_host: Optional[StrictStr] = Field(
+ default=None, description="the PostHog API host", alias="apiHost"
+ )
+ __properties: ClassVar[List[str]] = ["apiKey", "apiHost"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of APIMetaPosthog from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of APIMetaPosthog from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {"apiKey": obj.get("apiKey"), "apiHost": obj.get("apiHost")}
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_resource_meta.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_resource_meta.py
new file mode 100644
index 00000000..8c353248
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_resource_meta.py
@@ -0,0 +1,98 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Annotated, Self
+
+
+class APIResourceMeta(BaseModel):
+ """
+ APIResourceMeta
+ """ # noqa: E501
+
+ id: Annotated[str, Field(min_length=0, strict=True, max_length=36)] = Field(
+ description="the id of this resource, in UUID format"
+ )
+ created_at: datetime = Field(
+ description="the time that this resource was created", alias="createdAt"
+ )
+ updated_at: datetime = Field(
+ description="the time that this resource was last updated", alias="updatedAt"
+ )
+ __properties: ClassVar[List[str]] = ["id", "createdAt", "updatedAt"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of APIResourceMeta from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of APIResourceMeta from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "id": obj.get("id"),
+ "createdAt": obj.get("createdAt"),
+ "updatedAt": obj.get("updatedAt"),
+ }
+ )
+ return _obj
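
createdAt and updatedAt are typed as datetime, so ISO 8601 strings passed to from_dict are coerced to datetime objects by pydantic; the values below are made up:

from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta

meta = APIResourceMeta.from_dict(
    {
        "id": "123e4567-e89b-12d3-a456-426614174000",
        "createdAt": "2024-01-01T00:00:00.000000+00:00",
        "updatedAt": "2024-01-02T12:30:00.000000+00:00",
    }
)
print(type(meta.created_at).__name__)  # datetime
print(meta.to_dict()["createdAt"])     # datetime object preserved by model_dump
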
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_token.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_token.py
new file mode 100644
index 00000000..e469e3af
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/api_token.py
@@ -0,0 +1,105 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Annotated, Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+
+
+class APIToken(BaseModel):
+ """
+ APIToken
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ name: Annotated[str, Field(strict=True, max_length=255)] = Field(
+ description="The name of the API token."
+ )
+ expires_at: datetime = Field(
+ description="When the API token expires.", alias="expiresAt"
+ )
+ __properties: ClassVar[List[str]] = ["metadata", "name", "expiresAt"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of APIToken from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of APIToken from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "name": obj.get("name"),
+ "expiresAt": obj.get("expiresAt"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/bulk_create_event_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/bulk_create_event_request.py
new file mode 100644
index 00000000..8d08d394
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/bulk_create_event_request.py
@@ -0,0 +1,100 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.create_event_request import CreateEventRequest
+
+
+class BulkCreateEventRequest(BaseModel):
+ """
+ BulkCreateEventRequest
+ """ # noqa: E501
+
+ events: List[CreateEventRequest]
+ __properties: ClassVar[List[str]] = ["events"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of BulkCreateEventRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of each item in events (list)
+ _items = []
+ if self.events:
+ for _item_events in self.events:
+ if _item_events:
+ _items.append(_item_events.to_dict())
+ _dict["events"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of BulkCreateEventRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "events": (
+ [CreateEventRequest.from_dict(_item) for _item in obj["events"]]
+ if obj.get("events") is not None
+ else None
+ )
+ }
+ )
+ return _obj
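A minimal usage sketch for the generated model above, based only on the fields it declares (`events`, a list of `CreateEventRequest` objects defined later in this diff); the event keys and payloads are illustrative values, not part of the SDK.

from hatchet_sdk.clients.rest.models.bulk_create_event_request import BulkCreateEventRequest
from hatchet_sdk.clients.rest.models.create_event_request import CreateEventRequest

# Build two events; `key`, `data` and `additional_metadata` are the CreateEventRequest fields.
events = [
    CreateEventRequest(key="user:created", data={"user_id": 1}),
    CreateEventRequest(
        key="user:created",
        data={"user_id": 2},
        additional_metadata={"source": "import"},
    ),
]

bulk = BulkCreateEventRequest(events=events)

# to_json() serializes via to_dict(), so each nested item is rendered through its own to_dict().
payload = bulk.to_json()

# from_json() reverses the process and re-hydrates nested CreateEventRequest objects.
roundtrip = BulkCreateEventRequest.from_json(payload)
assert roundtrip is not None and roundtrip.events[0].key == "user:created"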
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/bulk_create_event_response.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/bulk_create_event_response.py
new file mode 100644
index 00000000..768c5c90
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/bulk_create_event_response.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.event import Event
+
+
+class BulkCreateEventResponse(BaseModel):
+ """
+ BulkCreateEventResponse
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ events: List[Event] = Field(description="The events.")
+ __properties: ClassVar[List[str]] = ["metadata", "events"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of BulkCreateEventResponse from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in events (list)
+ _items = []
+ if self.events:
+ for _item_events in self.events:
+ if _item_events:
+ _items.append(_item_events.to_dict())
+ _dict["events"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of BulkCreateEventResponse from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "events": (
+ [Event.from_dict(_item) for _item in obj["events"]]
+ if obj.get("events") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cancel_event_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cancel_event_request.py
new file mode 100644
index 00000000..3fa171ad
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cancel_event_request.py
@@ -0,0 +1,85 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Annotated, Self
+
+
+class CancelEventRequest(BaseModel):
+ """
+ CancelEventRequest
+ """ # noqa: E501
+
+ event_ids: List[
+ Annotated[str, Field(min_length=36, strict=True, max_length=36)]
+ ] = Field(alias="eventIds")
+ __properties: ClassVar[List[str]] = ["eventIds"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CancelEventRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CancelEventRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"eventIds": obj.get("eventIds")})
+ return _obj
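A small sketch of how the request above can be populated; the UUID below is a placeholder, and the 36-character length constraint on each entry comes from the `eventIds` field definition.

from hatchet_sdk.clients.rest.models.cancel_event_request import CancelEventRequest

# Each entry must be exactly 36 characters (a canonical UUID string).
placeholder_id = "00000000-0000-0000-0000-000000000000"

# Construction works with either the Python field name or the wire alias,
# because the model sets populate_by_name=True.
req_by_field = CancelEventRequest(event_ids=[placeholder_id])
req_by_alias = CancelEventRequest.from_dict({"eventIds": [placeholder_id]})

# to_dict() always emits the alias form expected by the API.
assert req_by_field.to_dict() == {"eventIds": [placeholder_id]}
assert req_by_alias is not None and req_by_alias.event_ids == [placeholder_id]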
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cancel_step_run_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cancel_step_run_request.py
new file mode 100644
index 00000000..69e97297
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cancel_step_run_request.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+
+class CancelStepRunRequest(BaseModel):
+ """
+ CancelStepRunRequest
+ """ # noqa: E501
+
+ input: Dict[str, Any]
+ __properties: ClassVar[List[str]] = ["input"]
+
+    model_config = ConfigDict(
+        populate_by_name=True,
+        validate_assignment=True,
+        protected_namespaces=(),
+    )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CancelStepRunRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CancelStepRunRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"input": obj.get("input")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/concurrency_limit_strategy.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/concurrency_limit_strategy.py
new file mode 100644
index 00000000..74460b5e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/concurrency_limit_strategy.py
@@ -0,0 +1,39 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class ConcurrencyLimitStrategy(str, Enum):
+ """
+ ConcurrencyLimitStrategy
+ """
+
+ """
+ allowed enum values
+ """
+ CANCEL_IN_PROGRESS = "CANCEL_IN_PROGRESS"
+ DROP_NEWEST = "DROP_NEWEST"
+ QUEUE_NEWEST = "QUEUE_NEWEST"
+ GROUP_ROUND_ROBIN = "GROUP_ROUND_ROBIN"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of ConcurrencyLimitStrategy from a JSON string"""
+ return cls(json.loads(json_str))
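A quick illustration of the enum above; its members subclass both `str` and `Enum`, so they compare directly against raw strings, and `from_json` parses a JSON string literal.

import json

from hatchet_sdk.clients.rest.models.concurrency_limit_strategy import ConcurrencyLimitStrategy

strategy = ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN

# str subclass: the member compares equal to its raw value.
assert strategy == "GROUP_ROUND_ROBIN"

# from_json expects a JSON string literal, i.e. the value wrapped in quotes.
parsed = ConcurrencyLimitStrategy.from_json(json.dumps("CANCEL_IN_PROGRESS"))
assert parsed is ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS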
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_api_token_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_api_token_request.py
new file mode 100644
index 00000000..5614ef0a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_api_token_request.py
@@ -0,0 +1,92 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Annotated, Self
+
+
+class CreateAPITokenRequest(BaseModel):
+ """
+ CreateAPITokenRequest
+ """ # noqa: E501
+
+ name: Annotated[str, Field(strict=True, max_length=255)] = Field(
+ description="A name for the API token."
+ )
+ expires_in: Optional[StrictStr] = Field(
+ default=None,
+ description="The duration for which the token is valid.",
+ alias="expiresIn",
+ )
+ __properties: ClassVar[List[str]] = ["name", "expiresIn"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CreateAPITokenRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CreateAPITokenRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {"name": obj.get("name"), "expiresIn": obj.get("expiresIn")}
+ )
+ return _obj
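A sketch of the alias handling in the model above; the token name and the "90d" duration string are illustrative inputs (the accepted duration format is defined by the API, not by this model).

from hatchet_sdk.clients.rest.models.create_api_token_request import CreateAPITokenRequest

# `expires_in` is optional and exposed on the wire as `expiresIn`.
req = CreateAPITokenRequest(name="ci-token", expires_in="90d")
assert req.to_dict() == {"name": "ci-token", "expiresIn": "90d"}

# When the optional field is left unset, exclude_none=True drops it from to_dict().
minimal = CreateAPITokenRequest(name="ci-token")
assert minimal.to_dict() == {"name": "ci-token"}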
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_api_token_response.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_api_token_response.py
new file mode 100644
index 00000000..2fb1e62d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_api_token_response.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class CreateAPITokenResponse(BaseModel):
+ """
+ CreateAPITokenResponse
+ """ # noqa: E501
+
+ token: StrictStr = Field(description="The API token.")
+ __properties: ClassVar[List[str]] = ["token"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CreateAPITokenResponse from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CreateAPITokenResponse from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"token": obj.get("token")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_cron_workflow_trigger_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_cron_workflow_trigger_request.py
new file mode 100644
index 00000000..e2b49df0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_cron_workflow_trigger_request.py
@@ -0,0 +1,98 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class CreateCronWorkflowTriggerRequest(BaseModel):
+ """
+ CreateCronWorkflowTriggerRequest
+ """ # noqa: E501
+
+ input: Dict[str, Any]
+ additional_metadata: Dict[str, Any] = Field(alias="additionalMetadata")
+ cron_name: StrictStr = Field(alias="cronName")
+ cron_expression: StrictStr = Field(alias="cronExpression")
+ __properties: ClassVar[List[str]] = [
+ "input",
+ "additionalMetadata",
+ "cronName",
+ "cronExpression",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CreateCronWorkflowTriggerRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CreateCronWorkflowTriggerRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "input": obj.get("input"),
+ "additionalMetadata": obj.get("additionalMetadata"),
+ "cronName": obj.get("cronName"),
+ "cronExpression": obj.get("cronExpression"),
+ }
+ )
+ return _obj
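A construction sketch for the cron trigger request above; all four fields are required by the model, and the workflow input, metadata and cron expression shown here are placeholder values.

from hatchet_sdk.clients.rest.models.create_cron_workflow_trigger_request import (
    CreateCronWorkflowTriggerRequest,
)

req = CreateCronWorkflowTriggerRequest(
    input={"limit": 100},                      # arbitrary workflow input payload
    additional_metadata={"team": "platform"},  # arbitrary key/value metadata
    cron_name="nightly-report",
    cron_expression="0 2 * * *",               # standard 5-field cron syntax
)

# to_dict() emits the camelCase aliases the REST API expects.
body = req.to_dict()
assert body["cronName"] == "nightly-report" and body["cronExpression"] == "0 2 * * *"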
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_event_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_event_request.py
new file mode 100644
index 00000000..adc37ce6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_event_request.py
@@ -0,0 +1,95 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class CreateEventRequest(BaseModel):
+ """
+ CreateEventRequest
+ """ # noqa: E501
+
+ key: StrictStr = Field(description="The key for the event.")
+ data: Dict[str, Any] = Field(description="The data for the event.")
+ additional_metadata: Optional[Dict[str, Any]] = Field(
+ default=None,
+ description="Additional metadata for the event.",
+ alias="additionalMetadata",
+ )
+ __properties: ClassVar[List[str]] = ["key", "data", "additionalMetadata"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CreateEventRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CreateEventRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "key": obj.get("key"),
+ "data": obj.get("data"),
+ "additionalMetadata": obj.get("additionalMetadata"),
+ }
+ )
+ return _obj
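A round-trip sketch for the single-event request above, showing the to_dict() behaviour its docstring describes: an unset `additionalMetadata` is omitted rather than emitted as null. The key and payload are illustrative.

from hatchet_sdk.clients.rest.models.create_event_request import CreateEventRequest

event = CreateEventRequest(key="order:completed", data={"order_id": "A-123"})

# additional_metadata was never set, so exclude_none=True leaves it out entirely.
assert event.to_dict() == {"key": "order:completed", "data": {"order_id": "A-123"}}

# from_json() accepts the same shape the API returns for this schema.
parsed = CreateEventRequest.from_json('{"key": "order:completed", "data": {}}')
assert parsed is not None and parsed.additional_metadata is None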
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_pull_request_from_step_run.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_pull_request_from_step_run.py
new file mode 100644
index 00000000..0984ff06
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_pull_request_from_step_run.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class CreatePullRequestFromStepRun(BaseModel):
+ """
+ CreatePullRequestFromStepRun
+ """ # noqa: E501
+
+ branch_name: StrictStr = Field(alias="branchName")
+ __properties: ClassVar[List[str]] = ["branchName"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CreatePullRequestFromStepRun from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CreatePullRequestFromStepRun from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"branchName": obj.get("branchName")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_sns_integration_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_sns_integration_request.py
new file mode 100644
index 00000000..ddb76cd1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_sns_integration_request.py
@@ -0,0 +1,85 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class CreateSNSIntegrationRequest(BaseModel):
+ """
+ CreateSNSIntegrationRequest
+ """ # noqa: E501
+
+ topic_arn: StrictStr = Field(
+ description="The Amazon Resource Name (ARN) of the SNS topic.", alias="topicArn"
+ )
+ __properties: ClassVar[List[str]] = ["topicArn"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CreateSNSIntegrationRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CreateSNSIntegrationRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"topicArn": obj.get("topicArn")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_tenant_alert_email_group_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_tenant_alert_email_group_request.py
new file mode 100644
index 00000000..bc3a6953
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_tenant_alert_email_group_request.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class CreateTenantAlertEmailGroupRequest(BaseModel):
+ """
+ CreateTenantAlertEmailGroupRequest
+ """ # noqa: E501
+
+ emails: List[StrictStr] = Field(description="A list of emails for users")
+ __properties: ClassVar[List[str]] = ["emails"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CreateTenantAlertEmailGroupRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CreateTenantAlertEmailGroupRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"emails": obj.get("emails")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_tenant_invite_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_tenant_invite_request.py
new file mode 100644
index 00000000..83450b48
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_tenant_invite_request.py
@@ -0,0 +1,86 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.tenant_member_role import TenantMemberRole
+
+
+class CreateTenantInviteRequest(BaseModel):
+ """
+ CreateTenantInviteRequest
+ """ # noqa: E501
+
+ email: StrictStr = Field(description="The email of the user to invite.")
+ role: TenantMemberRole = Field(description="The role of the user in the tenant.")
+ __properties: ClassVar[List[str]] = ["email", "role"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CreateTenantInviteRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CreateTenantInviteRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"email": obj.get("email"), "role": obj.get("role")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_tenant_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_tenant_request.py
new file mode 100644
index 00000000..84946f57
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/create_tenant_request.py
@@ -0,0 +1,84 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class CreateTenantRequest(BaseModel):
+ """
+ CreateTenantRequest
+ """ # noqa: E501
+
+ name: StrictStr = Field(description="The name of the tenant.")
+ slug: StrictStr = Field(description="The slug of the tenant.")
+ __properties: ClassVar[List[str]] = ["name", "slug"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CreateTenantRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CreateTenantRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"name": obj.get("name"), "slug": obj.get("slug")})
+ return _obj
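A short sketch of the strict typing on the model above: `StrictStr` rejects non-string values instead of coercing them, so bad inputs fail at construction time. The tenant name and slug are placeholders.

from pydantic import ValidationError

from hatchet_sdk.clients.rest.models.create_tenant_request import CreateTenantRequest

ok = CreateTenantRequest(name="Acme", slug="acme")
assert ok.to_dict() == {"name": "Acme", "slug": "acme"}

# StrictStr does not coerce: an integer slug raises a validation error rather than becoming "123".
try:
    CreateTenantRequest(name="Acme", slug=123)
except ValidationError:
    pass
else:
    raise AssertionError("expected StrictStr to reject a non-string slug")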
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows.py
new file mode 100644
index 00000000..8005e407
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows.py
@@ -0,0 +1,131 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.cron_workflows_method import CronWorkflowsMethod
+
+
+class CronWorkflows(BaseModel):
+ """
+ CronWorkflows
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ tenant_id: StrictStr = Field(alias="tenantId")
+ workflow_version_id: StrictStr = Field(alias="workflowVersionId")
+ workflow_id: StrictStr = Field(alias="workflowId")
+ workflow_name: StrictStr = Field(alias="workflowName")
+ cron: StrictStr
+ name: Optional[StrictStr] = None
+ input: Optional[Dict[str, Any]] = None
+ additional_metadata: Optional[Dict[str, Any]] = Field(
+ default=None, alias="additionalMetadata"
+ )
+ enabled: StrictBool
+ method: CronWorkflowsMethod
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "tenantId",
+ "workflowVersionId",
+ "workflowId",
+ "workflowName",
+ "cron",
+ "name",
+ "input",
+ "additionalMetadata",
+ "enabled",
+ "method",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CronWorkflows from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CronWorkflows from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "tenantId": obj.get("tenantId"),
+ "workflowVersionId": obj.get("workflowVersionId"),
+ "workflowId": obj.get("workflowId"),
+ "workflowName": obj.get("workflowName"),
+ "cron": obj.get("cron"),
+ "name": obj.get("name"),
+ "input": obj.get("input"),
+ "additionalMetadata": obj.get("additionalMetadata"),
+ "enabled": obj.get("enabled"),
+ "method": obj.get("method"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows_list.py
new file mode 100644
index 00000000..b0ab6967
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows_list.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.cron_workflows import CronWorkflows
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+
+
+class CronWorkflowsList(BaseModel):
+ """
+ CronWorkflowsList
+ """ # noqa: E501
+
+ rows: Optional[List[CronWorkflows]] = None
+ pagination: Optional[PaginationResponse] = None
+ __properties: ClassVar[List[str]] = ["rows", "pagination"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CronWorkflowsList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CronWorkflowsList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "rows": (
+ [CronWorkflows.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows_method.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows_method.py
new file mode 100644
index 00000000..1f671fb5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows_method.py
@@ -0,0 +1,37 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class CronWorkflowsMethod(str, Enum):
+ """
+ CronWorkflowsMethod
+ """
+
+ """
+ allowed enum values
+ """
+ DEFAULT = "DEFAULT"
+ API = "API"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of CronWorkflowsMethod from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows_order_by_field.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows_order_by_field.py
new file mode 100644
index 00000000..b95d199a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/cron_workflows_order_by_field.py
@@ -0,0 +1,37 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class CronWorkflowsOrderByField(str, Enum):
+ """
+ CronWorkflowsOrderByField
+ """
+
+ """
+ allowed enum values
+ """
+ NAME = "name"
+ CREATEDAT = "createdAt"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of CronWorkflowsOrderByField from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event.py
new file mode 100644
index 00000000..4838a3ec
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event.py
@@ -0,0 +1,143 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.event_workflow_run_summary import (
+ EventWorkflowRunSummary,
+)
+from hatchet_sdk.clients.rest.models.tenant import Tenant
+
+
+class Event(BaseModel):
+ """
+ Event
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ key: StrictStr = Field(description="The key for the event.")
+ tenant: Optional[Tenant] = Field(
+ default=None, description="The tenant associated with this event."
+ )
+ tenant_id: StrictStr = Field(
+ description="The ID of the tenant associated with this event.", alias="tenantId"
+ )
+ workflow_run_summary: Optional[EventWorkflowRunSummary] = Field(
+ default=None,
+ description="The workflow run summary for this event.",
+ alias="workflowRunSummary",
+ )
+ additional_metadata: Optional[Dict[str, Any]] = Field(
+ default=None,
+ description="Additional metadata for the event.",
+ alias="additionalMetadata",
+ )
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "key",
+ "tenant",
+ "tenantId",
+ "workflowRunSummary",
+ "additionalMetadata",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of Event from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of tenant
+ if self.tenant:
+ _dict["tenant"] = self.tenant.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of workflow_run_summary
+ if self.workflow_run_summary:
+ _dict["workflowRunSummary"] = self.workflow_run_summary.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of Event from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "key": obj.get("key"),
+ "tenant": (
+ Tenant.from_dict(obj["tenant"])
+ if obj.get("tenant") is not None
+ else None
+ ),
+ "tenantId": obj.get("tenantId"),
+ "workflowRunSummary": (
+ EventWorkflowRunSummary.from_dict(obj["workflowRunSummary"])
+ if obj.get("workflowRunSummary") is not None
+ else None
+ ),
+ "additionalMetadata": obj.get("additionalMetadata"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_data.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_data.py
new file mode 100644
index 00000000..eb0d6128
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_data.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class EventData(BaseModel):
+ """
+ EventData
+ """ # noqa: E501
+
+ data: StrictStr = Field(description="The data for the event (JSON bytes).")
+ __properties: ClassVar[List[str]] = ["data"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of EventData from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of EventData from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"data": obj.get("data")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_key_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_key_list.py
new file mode 100644
index 00000000..c56595ae
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_key_list.py
@@ -0,0 +1,98 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+
+
+class EventKeyList(BaseModel):
+ """
+ EventKeyList
+ """ # noqa: E501
+
+ pagination: Optional[PaginationResponse] = None
+ rows: Optional[List[StrictStr]] = None
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of EventKeyList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of EventKeyList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": obj.get("rows"),
+ }
+ )
+ return _obj
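A parsing sketch for the key-list model above, using only its two optional fields; the key names are illustrative and `pagination` is simply omitted from the payload here.

from hatchet_sdk.clients.rest.models.event_key_list import EventKeyList

keys = EventKeyList.from_dict({"rows": ["user:created", "order:completed"]})

assert keys is not None
assert keys.rows == ["user:created", "order:completed"]
# pagination was absent in the payload, so it stays None and is omitted by to_dict().
assert keys.pagination is None
assert keys.to_dict() == {"rows": ["user:created", "order:completed"]}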
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_list.py
new file mode 100644
index 00000000..5c928005
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_list.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.event import Event
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+
+
+class EventList(BaseModel):
+ """
+ EventList
+ """ # noqa: E501
+
+ pagination: Optional[PaginationResponse] = None
+ rows: Optional[List[Event]] = None
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of EventList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of EventList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [Event.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
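
The EventList model above illustrates the serialization pattern shared by the generated list wrappers: from_json/from_dict rebuild nested models (PaginationResponse, Event) through their own from_dict, while to_dict drops unset fields because of exclude_none=True. A minimal round-trip sketch, assuming the package is importable from this virtualenv and using an empty page so no Event or PaginationResponse fields have to be invented:

    from hatchet_sdk.clients.rest.models.event_list import EventList

    # Both fields are optional, so an empty JSON object is a valid page.
    page = EventList.from_json("{}")
    print(page.rows)       # None
    print(page.to_dict())  # {} -- unset/None fields are omitted (exclude_none=True)
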
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_order_by_direction.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_order_by_direction.py
new file mode 100644
index 00000000..24255e0d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_order_by_direction.py
@@ -0,0 +1,37 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class EventOrderByDirection(str, Enum):
+ """
+ EventOrderByDirection
+ """
+
+ """
+ allowed enum values
+ """
+ ASC = "asc"
+ DESC = "desc"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of EventOrderByDirection from a JSON string"""
+ return cls(json.loads(json_str))
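
EventOrderByDirection (like the other generated enums in this diff) deserializes from a JSON scalar rather than an object: from_json simply json.loads the string and looks the value up in the enum. A small sketch under that assumption:

    import json
    from hatchet_sdk.clients.rest.models.event_order_by_direction import (
        EventOrderByDirection,
    )

    direction = EventOrderByDirection.from_json(json.dumps("desc"))
    assert direction is EventOrderByDirection.DESC
    print(direction.value)  # "desc"
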
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_order_by_field.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_order_by_field.py
new file mode 100644
index 00000000..da2193c3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_order_by_field.py
@@ -0,0 +1,36 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class EventOrderByField(str, Enum):
+ """
+ EventOrderByField
+ """
+
+ """
+ allowed enum values
+ """
+ CREATEDAT = "createdAt"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of EventOrderByField from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_update_cancel200_response.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_update_cancel200_response.py
new file mode 100644
index 00000000..6723ec30
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_update_cancel200_response.py
@@ -0,0 +1,85 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Annotated, Self
+
+
+class EventUpdateCancel200Response(BaseModel):
+ """
+ EventUpdateCancel200Response
+ """ # noqa: E501
+
+ workflow_run_ids: Optional[
+ List[Annotated[str, Field(min_length=36, strict=True, max_length=36)]]
+ ] = Field(default=None, alias="workflowRunIds")
+ __properties: ClassVar[List[str]] = ["workflowRunIds"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of EventUpdateCancel200Response from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of EventUpdateCancel200Response from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"workflowRunIds": obj.get("workflowRunIds")})
+ return _obj
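
The workflowRunIds field above is constrained to 36-character strings (the length of a canonical UUID), so pydantic rejects anything shorter or longer. A sketch of both paths; the UUID below is a placeholder, not a real run id:

    from pydantic import ValidationError
    from hatchet_sdk.clients.rest.models.event_update_cancel200_response import (
        EventUpdateCancel200Response,
    )

    ok = EventUpdateCancel200Response.from_dict(
        {"workflowRunIds": ["123e4567-e89b-12d3-a456-426614174000"]}  # 36 chars
    )
    print(ok.workflow_run_ids)

    try:
        EventUpdateCancel200Response(workflowRunIds=["not-a-uuid"])  # too short
    except ValidationError as exc:
        print(exc.error_count(), "validation error(s)")
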
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_workflow_run_summary.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_workflow_run_summary.py
new file mode 100644
index 00000000..c378441b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/event_workflow_run_summary.py
@@ -0,0 +1,116 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt
+from typing_extensions import Self
+
+
+class EventWorkflowRunSummary(BaseModel):
+ """
+ EventWorkflowRunSummary
+ """ # noqa: E501
+
+ pending: Optional[StrictInt] = Field(
+ default=None, description="The number of pending runs."
+ )
+ running: Optional[StrictInt] = Field(
+ default=None, description="The number of running runs."
+ )
+ queued: Optional[StrictInt] = Field(
+ default=None, description="The number of queued runs."
+ )
+ succeeded: Optional[StrictInt] = Field(
+ default=None, description="The number of succeeded runs."
+ )
+ failed: Optional[StrictInt] = Field(
+ default=None, description="The number of failed runs."
+ )
+ cancelled: Optional[StrictInt] = Field(
+ default=None, description="The number of cancelled runs."
+ )
+ __properties: ClassVar[List[str]] = [
+ "pending",
+ "running",
+ "queued",
+ "succeeded",
+ "failed",
+ "cancelled",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of EventWorkflowRunSummary from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of EventWorkflowRunSummary from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pending": obj.get("pending"),
+ "running": obj.get("running"),
+ "queued": obj.get("queued"),
+ "succeeded": obj.get("succeeded"),
+ "failed": obj.get("failed"),
+ "cancelled": obj.get("cancelled"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/events.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/events.py
new file mode 100644
index 00000000..ec8c1cd1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/events.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.event import Event
+
+
+class Events(BaseModel):
+ """
+ Events
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ events: List[Event] = Field(description="The events.")
+ __properties: ClassVar[List[str]] = ["metadata", "events"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of Events from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in events (list)
+ _items = []
+ if self.events:
+ for _item_events in self.events:
+ if _item_events:
+ _items.append(_item_events.to_dict())
+ _dict["events"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of Events from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "events": (
+ [Event.from_dict(_item) for _item in obj["events"]]
+ if obj.get("events") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/get_step_run_diff_response.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/get_step_run_diff_response.py
new file mode 100644
index 00000000..b9dbc435
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/get_step_run_diff_response.py
@@ -0,0 +1,100 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.step_run_diff import StepRunDiff
+
+
+class GetStepRunDiffResponse(BaseModel):
+ """
+ GetStepRunDiffResponse
+ """ # noqa: E501
+
+ diffs: List[StepRunDiff]
+ __properties: ClassVar[List[str]] = ["diffs"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of GetStepRunDiffResponse from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of each item in diffs (list)
+ _items = []
+ if self.diffs:
+ for _item_diffs in self.diffs:
+ if _item_diffs:
+ _items.append(_item_diffs.to_dict())
+ _dict["diffs"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of GetStepRunDiffResponse from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "diffs": (
+ [StepRunDiff.from_dict(_item) for _item in obj["diffs"]]
+ if obj.get("diffs") is not None
+ else None
+ )
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/github_app_installation.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/github_app_installation.py
new file mode 100644
index 00000000..35bbc230
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/github_app_installation.py
@@ -0,0 +1,107 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+
+
+class GithubAppInstallation(BaseModel):
+ """
+ GithubAppInstallation
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ installation_settings_url: StrictStr
+ account_name: StrictStr
+ account_avatar_url: StrictStr
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "installation_settings_url",
+ "account_name",
+ "account_avatar_url",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of GithubAppInstallation from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of GithubAppInstallation from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "installation_settings_url": obj.get("installation_settings_url"),
+ "account_name": obj.get("account_name"),
+ "account_avatar_url": obj.get("account_avatar_url"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/github_branch.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/github_branch.py
new file mode 100644
index 00000000..16501da3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/github_branch.py
@@ -0,0 +1,86 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, StrictBool, StrictStr
+from typing_extensions import Self
+
+
+class GithubBranch(BaseModel):
+ """
+ GithubBranch
+ """ # noqa: E501
+
+ branch_name: StrictStr
+ is_default: StrictBool
+ __properties: ClassVar[List[str]] = ["branch_name", "is_default"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of GithubBranch from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of GithubBranch from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {"branch_name": obj.get("branch_name"), "is_default": obj.get("is_default")}
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/github_repo.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/github_repo.py
new file mode 100644
index 00000000..3bc4d179
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/github_repo.py
@@ -0,0 +1,86 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, StrictStr
+from typing_extensions import Self
+
+
+class GithubRepo(BaseModel):
+ """
+ GithubRepo
+ """ # noqa: E501
+
+ repo_owner: StrictStr
+ repo_name: StrictStr
+ __properties: ClassVar[List[str]] = ["repo_owner", "repo_name"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of GithubRepo from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of GithubRepo from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {"repo_owner": obj.get("repo_owner"), "repo_name": obj.get("repo_name")}
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/info_get_version200_response.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/info_get_version200_response.py
new file mode 100644
index 00000000..93100ac4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/info_get_version200_response.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, StrictStr
+from typing_extensions import Self
+
+
+class InfoGetVersion200Response(BaseModel):
+ """
+ InfoGetVersion200Response
+ """ # noqa: E501
+
+ version: StrictStr
+ __properties: ClassVar[List[str]] = ["version"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of InfoGetVersion200Response from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of InfoGetVersion200Response from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"version": obj.get("version")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/job.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/job.py
new file mode 100644
index 00000000..c412ef68
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/job.py
@@ -0,0 +1,132 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.step import Step
+
+
+class Job(BaseModel):
+ """
+ Job
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ tenant_id: StrictStr = Field(alias="tenantId")
+ version_id: StrictStr = Field(alias="versionId")
+ name: StrictStr
+ description: Optional[StrictStr] = Field(
+ default=None, description="The description of the job."
+ )
+ steps: List[Step]
+ timeout: Optional[StrictStr] = Field(
+ default=None, description="The timeout of the job."
+ )
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "tenantId",
+ "versionId",
+ "name",
+ "description",
+ "steps",
+ "timeout",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of Job from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in steps (list)
+ _items = []
+ if self.steps:
+ for _item_steps in self.steps:
+ if _item_steps:
+ _items.append(_item_steps.to_dict())
+ _dict["steps"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of Job from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "tenantId": obj.get("tenantId"),
+ "versionId": obj.get("versionId"),
+ "name": obj.get("name"),
+ "description": obj.get("description"),
+ "steps": (
+ [Step.from_dict(_item) for _item in obj["steps"]]
+ if obj.get("steps") is not None
+ else None
+ ),
+ "timeout": obj.get("timeout"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/job_run.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/job_run.py
new file mode 100644
index 00000000..3a7ec051
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/job_run.py
@@ -0,0 +1,176 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.job import Job
+from hatchet_sdk.clients.rest.models.job_run_status import JobRunStatus
+
+
+class JobRun(BaseModel):
+ """
+ JobRun
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ tenant_id: StrictStr = Field(alias="tenantId")
+ workflow_run_id: StrictStr = Field(alias="workflowRunId")
+ workflow_run: Optional[WorkflowRun] = Field(default=None, alias="workflowRun")
+ job_id: StrictStr = Field(alias="jobId")
+ job: Optional[Job] = None
+ ticker_id: Optional[StrictStr] = Field(default=None, alias="tickerId")
+ step_runs: Optional[List[StepRun]] = Field(default=None, alias="stepRuns")
+ status: JobRunStatus
+ result: Optional[Dict[str, Any]] = None
+ started_at: Optional[datetime] = Field(default=None, alias="startedAt")
+ finished_at: Optional[datetime] = Field(default=None, alias="finishedAt")
+ timeout_at: Optional[datetime] = Field(default=None, alias="timeoutAt")
+ cancelled_at: Optional[datetime] = Field(default=None, alias="cancelledAt")
+ cancelled_reason: Optional[StrictStr] = Field(default=None, alias="cancelledReason")
+ cancelled_error: Optional[StrictStr] = Field(default=None, alias="cancelledError")
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "tenantId",
+ "workflowRunId",
+ "workflowRun",
+ "jobId",
+ "job",
+ "tickerId",
+ "stepRuns",
+ "status",
+ "result",
+ "startedAt",
+ "finishedAt",
+ "timeoutAt",
+ "cancelledAt",
+ "cancelledReason",
+ "cancelledError",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of JobRun from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of workflow_run
+ if self.workflow_run:
+ _dict["workflowRun"] = self.workflow_run.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of job
+ if self.job:
+ _dict["job"] = self.job.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in step_runs (list)
+ _items = []
+ if self.step_runs:
+ for _item_step_runs in self.step_runs:
+ if _item_step_runs:
+ _items.append(_item_step_runs.to_dict())
+ _dict["stepRuns"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of JobRun from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "tenantId": obj.get("tenantId"),
+ "workflowRunId": obj.get("workflowRunId"),
+ "workflowRun": (
+ WorkflowRun.from_dict(obj["workflowRun"])
+ if obj.get("workflowRun") is not None
+ else None
+ ),
+ "jobId": obj.get("jobId"),
+ "job": (
+ Job.from_dict(obj["job"]) if obj.get("job") is not None else None
+ ),
+ "tickerId": obj.get("tickerId"),
+ "stepRuns": (
+ [StepRun.from_dict(_item) for _item in obj["stepRuns"]]
+ if obj.get("stepRuns") is not None
+ else None
+ ),
+ "status": obj.get("status"),
+ "result": obj.get("result"),
+ "startedAt": obj.get("startedAt"),
+ "finishedAt": obj.get("finishedAt"),
+ "timeoutAt": obj.get("timeoutAt"),
+ "cancelledAt": obj.get("cancelledAt"),
+ "cancelledReason": obj.get("cancelledReason"),
+ "cancelledError": obj.get("cancelledError"),
+ }
+ )
+ return _obj
+
+
+from hatchet_sdk.clients.rest.models.step_run import StepRun
+from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun
+
+# TODO: Rewrite to not use raise_errors
+JobRun.model_rebuild(raise_errors=False)
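
JobRun, WorkflowRun and StepRun reference each other, so the generator defers the WorkflowRun/StepRun imports to the bottom of job_run.py and then calls model_rebuild(raise_errors=False) to resolve the postponed annotations (from __future__ import annotations keeps them as strings until that point). Assuming workflow_run.py and step_run.py defer their imports of job_run in the same way, the cycle is broken at import time while the field types still end up concrete; the exact repr printed below depends on the installed pydantic version:

    from hatchet_sdk.clients.rest.models.job_run import JobRun

    # After the deferred imports and model_rebuild() have run, the postponed
    # annotations resolve to the real classes.
    print(JobRun.model_fields["workflow_run"].annotation)  # e.g. Optional[WorkflowRun]
    print(JobRun.model_fields["step_runs"].annotation)     # e.g. Optional[List[StepRun]]
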
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/job_run_status.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/job_run_status.py
new file mode 100644
index 00000000..68a14711
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/job_run_status.py
@@ -0,0 +1,41 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class JobRunStatus(str, Enum):
+ """
+ JobRunStatus
+ """
+
+ """
+ allowed enum values
+ """
+ PENDING = "PENDING"
+ RUNNING = "RUNNING"
+ SUCCEEDED = "SUCCEEDED"
+ FAILED = "FAILED"
+ CANCELLED = "CANCELLED"
+ BACKOFF = "BACKOFF"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of JobRunStatus from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/link_github_repository_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/link_github_repository_request.py
new file mode 100644
index 00000000..e5ddf6e5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/link_github_repository_request.py
@@ -0,0 +1,106 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Annotated, Self
+
+
+class LinkGithubRepositoryRequest(BaseModel):
+ """
+ LinkGithubRepositoryRequest
+ """ # noqa: E501
+
+ installation_id: Annotated[
+ str, Field(min_length=36, strict=True, max_length=36)
+    ] = Field(description="The installation id.", alias="installationId")
+ git_repo_name: StrictStr = Field(
+ description="The repository name.", alias="gitRepoName"
+ )
+ git_repo_owner: StrictStr = Field(
+ description="The repository owner.", alias="gitRepoOwner"
+ )
+ git_repo_branch: StrictStr = Field(
+ description="The repository branch.", alias="gitRepoBranch"
+ )
+ __properties: ClassVar[List[str]] = [
+ "installationId",
+ "gitRepoName",
+ "gitRepoOwner",
+ "gitRepoBranch",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of LinkGithubRepositoryRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of LinkGithubRepositoryRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "installationId": obj.get("installationId"),
+ "gitRepoName": obj.get("gitRepoName"),
+ "gitRepoOwner": obj.get("gitRepoOwner"),
+ "gitRepoBranch": obj.get("gitRepoBranch"),
+ }
+ )
+ return _obj
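
Because every generated model sets populate_by_name=True, LinkGithubRepositoryRequest can be constructed with either the snake_case field names or the camelCase aliases, and to_json/to_dict always emit the aliases. A sketch with placeholder values (the installation id must be a 36-character string such as a UUID):

    from hatchet_sdk.clients.rest.models.link_github_repository_request import (
        LinkGithubRepositoryRequest,
    )

    req = LinkGithubRepositoryRequest(
        installation_id="123e4567-e89b-12d3-a456-426614174000",  # placeholder UUID
        git_repo_name="example-repo",
        git_repo_owner="example-owner",
        git_repo_branch="main",
    )
    print(req.to_json())  # keys use the camelCase aliases, e.g. "gitRepoName"
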
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_api_tokens_response.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_api_tokens_response.py
new file mode 100644
index 00000000..b3590ab3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_api_tokens_response.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_token import APIToken
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+
+
+class ListAPITokensResponse(BaseModel):
+ """
+ ListAPITokensResponse
+ """ # noqa: E501
+
+ pagination: Optional[PaginationResponse] = None
+ rows: Optional[List[APIToken]] = None
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of ListAPITokensResponse from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of ListAPITokensResponse from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [APIToken.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_github_app_installations_response.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_github_app_installations_response.py
new file mode 100644
index 00000000..401fc266
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_github_app_installations_response.py
@@ -0,0 +1,112 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.github_app_installation import (
+ GithubAppInstallation,
+)
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+
+
+class ListGithubAppInstallationsResponse(BaseModel):
+ """
+ ListGithubAppInstallationsResponse
+ """ # noqa: E501
+
+ pagination: PaginationResponse
+ rows: List[GithubAppInstallation]
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of ListGithubAppInstallationsResponse from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item in self.rows:
+ if _item:
+ _items.append(_item.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of ListGithubAppInstallationsResponse from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [GithubAppInstallation.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_pull_requests_response.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_pull_requests_response.py
new file mode 100644
index 00000000..589d4c45
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_pull_requests_response.py
@@ -0,0 +1,100 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pull_request import PullRequest
+
+
+class ListPullRequestsResponse(BaseModel):
+ """
+ ListPullRequestsResponse
+ """ # noqa: E501
+
+ pull_requests: List[PullRequest] = Field(alias="pullRequests")
+ __properties: ClassVar[List[str]] = ["pullRequests"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of ListPullRequestsResponse from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of each item in pull_requests (list)
+ _items = []
+ if self.pull_requests:
+ for _item_pull_requests in self.pull_requests:
+ if _item_pull_requests:
+ _items.append(_item_pull_requests.to_dict())
+ _dict["pullRequests"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of ListPullRequestsResponse from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pullRequests": (
+ [PullRequest.from_dict(_item) for _item in obj["pullRequests"]]
+ if obj.get("pullRequests") is not None
+ else None
+ )
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_slack_webhooks.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_slack_webhooks.py
new file mode 100644
index 00000000..e86956d3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_slack_webhooks.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.slack_webhook import SlackWebhook
+
+
+class ListSlackWebhooks(BaseModel):
+ """
+ ListSlackWebhooks
+ """ # noqa: E501
+
+ pagination: PaginationResponse
+ rows: List[SlackWebhook]
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of ListSlackWebhooks from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of ListSlackWebhooks from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [SlackWebhook.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_sns_integrations.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_sns_integrations.py
new file mode 100644
index 00000000..130e9127
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/list_sns_integrations.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.sns_integration import SNSIntegration
+
+
+class ListSNSIntegrations(BaseModel):
+ """
+ ListSNSIntegrations
+ """ # noqa: E501
+
+ pagination: PaginationResponse
+ rows: List[SNSIntegration]
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of ListSNSIntegrations from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of ListSNSIntegrations from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [SNSIntegration.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line.py
new file mode 100644
index 00000000..ee4299cf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line.py
@@ -0,0 +1,94 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class LogLine(BaseModel):
+ """
+ LogLine
+ """ # noqa: E501
+
+ created_at: datetime = Field(
+ description="The creation date of the log line.", alias="createdAt"
+ )
+ message: StrictStr = Field(description="The log message.")
+ metadata: Dict[str, Any] = Field(description="The log metadata.")
+ __properties: ClassVar[List[str]] = ["createdAt", "message", "metadata"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of LogLine from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of LogLine from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "createdAt": obj.get("createdAt"),
+ "message": obj.get("message"),
+ "metadata": obj.get("metadata"),
+ }
+ )
+ return _obj
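
A short usage sketch for LogLine as defined above; the timestamp and message are invented, and the ISO string is coerced to a datetime because the field is typed as a plain datetime rather than a strict type.

from hatchet_sdk.clients.rest.models.log_line import LogLine

line = LogLine.from_dict(
    {
        "createdAt": "2024-01-01T00:00:00Z",  # coerced to datetime on validation
        "message": "step started",            # illustrative values only
        "metadata": {"attempt": 1},
    }
)
assert line is not None
print(line.created_at.isoformat(), line.message)
print(line.to_dict()["createdAt"])  # to_dict keeps the camelCase aliases
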
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_level.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_level.py
new file mode 100644
index 00000000..63fbec41
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_level.py
@@ -0,0 +1,39 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class LogLineLevel(str, Enum):
+ """
+ LogLineLevel
+ """
+
+ """
+ allowed enum values
+ """
+ DEBUG = "DEBUG"
+ INFO = "INFO"
+ WARN = "WARN"
+ ERROR = "ERROR"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of LogLineLevel from a JSON string"""
+ return cls(json.loads(json_str))
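
Since the enum subclasses str, members compare equal to the raw API strings; a small sketch:

from hatchet_sdk.clients.rest.models.log_line_level import LogLineLevel

level = LogLineLevel.from_json('"INFO"')   # parses a JSON string literal
assert level is LogLineLevel.INFO
assert level == "INFO"                     # str subclass, so plain comparisons work
print(list(LogLineLevel))                  # DEBUG, INFO, WARN, ERROR
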
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_list.py
new file mode 100644
index 00000000..e05d186a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_list.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.log_line import LogLine
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+
+
+class LogLineList(BaseModel):
+ """
+ LogLineList
+ """ # noqa: E501
+
+ pagination: Optional[PaginationResponse] = None
+ rows: Optional[List[LogLine]] = None
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of LogLineList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of LogLineList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [LogLine.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
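
Putting LogLine and PaginationResponse together, a sketch of parsing one paginated log response; the body is an invented example shaped like the fields above.

from hatchet_sdk.clients.rest.models.log_line_list import LogLineList

body = {
    "pagination": {"current_page": 1, "next_page": None, "num_pages": 1},
    "rows": [
        {"createdAt": "2024-01-01T00:00:00Z", "message": "done", "metadata": {}},
    ],
}
logs = LogLineList.from_dict(body)
assert logs is not None and logs.rows is not None
for row in logs.rows:
    print(row.created_at, row.message)
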
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_order_by_direction.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_order_by_direction.py
new file mode 100644
index 00000000..5f66f59b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_order_by_direction.py
@@ -0,0 +1,37 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class LogLineOrderByDirection(str, Enum):
+ """
+ LogLineOrderByDirection
+ """
+
+ """
+ allowed enum values
+ """
+ ASC = "asc"
+ DESC = "desc"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of LogLineOrderByDirection from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_order_by_field.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_order_by_field.py
new file mode 100644
index 00000000..93b92526
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/log_line_order_by_field.py
@@ -0,0 +1,36 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class LogLineOrderByField(str, Enum):
+ """
+ LogLineOrderByField
+ """
+
+ """
+ allowed enum values
+ """
+ CREATEDAT = "createdAt"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of LogLineOrderByField from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/pagination_response.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/pagination_response.py
new file mode 100644
index 00000000..2994dee9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/pagination_response.py
@@ -0,0 +1,95 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt
+from typing_extensions import Self
+
+
+class PaginationResponse(BaseModel):
+ """
+ PaginationResponse
+ """ # noqa: E501
+
+ current_page: Optional[StrictInt] = Field(
+ default=None, description="the current page"
+ )
+ next_page: Optional[StrictInt] = Field(default=None, description="the next page")
+ num_pages: Optional[StrictInt] = Field(
+ default=None, description="the total number of pages for listing"
+ )
+ __properties: ClassVar[List[str]] = ["current_page", "next_page", "num_pages"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of PaginationResponse from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of PaginationResponse from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "current_page": obj.get("current_page"),
+ "next_page": obj.get("next_page"),
+ "num_pages": obj.get("num_pages"),
+ }
+ )
+ return _obj
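
Every list model in this diff carries this pagination shape; a rough sketch of walking pages with it, where fetch_page is a hypothetical stand-in for whichever list endpoint is being paginated (it is not part of the SDK).

from typing import Any, Dict

from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse


def fetch_page(page: int) -> Dict[str, Any]:
    # placeholder, not part of the SDK: pretend this calls a real list endpoint
    return {"pagination": {"current_page": page, "next_page": None, "num_pages": 1}, "rows": []}


rows: list = []
page = 1
while True:
    body = fetch_page(page)
    rows.extend(body.get("rows", []))
    pagination = PaginationResponse.from_dict(body.get("pagination"))
    if pagination is None or pagination.next_page is None:
        break
    page = pagination.next_page
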
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/pull_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/pull_request.py
new file mode 100644
index 00000000..c1462591
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/pull_request.py
@@ -0,0 +1,112 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pull_request_state import PullRequestState
+
+
+class PullRequest(BaseModel):
+ """
+ PullRequest
+ """ # noqa: E501
+
+ repository_owner: StrictStr = Field(alias="repositoryOwner")
+ repository_name: StrictStr = Field(alias="repositoryName")
+ pull_request_id: StrictInt = Field(alias="pullRequestID")
+ pull_request_title: StrictStr = Field(alias="pullRequestTitle")
+ pull_request_number: StrictInt = Field(alias="pullRequestNumber")
+ pull_request_head_branch: StrictStr = Field(alias="pullRequestHeadBranch")
+ pull_request_base_branch: StrictStr = Field(alias="pullRequestBaseBranch")
+ pull_request_state: PullRequestState = Field(alias="pullRequestState")
+ __properties: ClassVar[List[str]] = [
+ "repositoryOwner",
+ "repositoryName",
+ "pullRequestID",
+ "pullRequestTitle",
+ "pullRequestNumber",
+ "pullRequestHeadBranch",
+ "pullRequestBaseBranch",
+ "pullRequestState",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of PullRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of PullRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "repositoryOwner": obj.get("repositoryOwner"),
+ "repositoryName": obj.get("repositoryName"),
+ "pullRequestID": obj.get("pullRequestID"),
+ "pullRequestTitle": obj.get("pullRequestTitle"),
+ "pullRequestNumber": obj.get("pullRequestNumber"),
+ "pullRequestHeadBranch": obj.get("pullRequestHeadBranch"),
+ "pullRequestBaseBranch": obj.get("pullRequestBaseBranch"),
+ "pullRequestState": obj.get("pullRequestState"),
+ }
+ )
+ return _obj
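
Because populate_by_name=True is set in model_config, the model accepts the snake_case field names shown above as well as the camelCase wire aliases; every value below is a placeholder.

from hatchet_sdk.clients.rest.models.pull_request import PullRequest
from hatchet_sdk.clients.rest.models.pull_request_state import PullRequestState

pr = PullRequest(
    repository_owner="acme",            # placeholder values throughout
    repository_name="widgets",
    pull_request_id=1234,
    pull_request_title="Fix flaky test",
    pull_request_number=42,
    pull_request_head_branch="fix/flaky-test",
    pull_request_base_branch="main",
    pull_request_state=PullRequestState.OPEN,
)
print(pr.to_dict()["pullRequestNumber"])  # 42; output keys use the aliases
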
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/pull_request_state.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/pull_request_state.py
new file mode 100644
index 00000000..a44d06cc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/pull_request_state.py
@@ -0,0 +1,37 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class PullRequestState(str, Enum):
+ """
+ PullRequestState
+ """
+
+ """
+ allowed enum values
+ """
+ OPEN = "open"
+ CLOSED = "closed"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of PullRequestState from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/queue_metrics.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/queue_metrics.py
new file mode 100644
index 00000000..d19066dd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/queue_metrics.py
@@ -0,0 +1,97 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt
+from typing_extensions import Self
+
+
+class QueueMetrics(BaseModel):
+ """
+ QueueMetrics
+ """ # noqa: E501
+
+ num_queued: StrictInt = Field(
+ description="The number of items in the queue.", alias="numQueued"
+ )
+ num_running: StrictInt = Field(
+ description="The number of items running.", alias="numRunning"
+ )
+ num_pending: StrictInt = Field(
+ description="The number of items pending.", alias="numPending"
+ )
+ __properties: ClassVar[List[str]] = ["numQueued", "numRunning", "numPending"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of QueueMetrics from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of QueueMetrics from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "numQueued": obj.get("numQueued"),
+ "numRunning": obj.get("numRunning"),
+ "numPending": obj.get("numPending"),
+ }
+ )
+ return _obj
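
The three counters are StrictInt, so they must already be integers in the payload; a small sketch with invented numbers.

from hatchet_sdk.clients.rest.models.queue_metrics import QueueMetrics

metrics = QueueMetrics.from_dict({"numQueued": 3, "numRunning": 2, "numPending": 1})
assert metrics is not None
print(metrics.num_queued + metrics.num_pending)  # 4 items not yet running
print(metrics.to_json())                         # int-only payload, safe to json.dumps
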
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit.py
new file mode 100644
index 00000000..0bf88522
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit.py
@@ -0,0 +1,117 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
+from typing_extensions import Self
+
+
+class RateLimit(BaseModel):
+ """
+ RateLimit
+ """ # noqa: E501
+
+ key: StrictStr = Field(description="The key for the rate limit.")
+ tenant_id: StrictStr = Field(
+ description="The ID of the tenant associated with this rate limit.",
+ alias="tenantId",
+ )
+ limit_value: StrictInt = Field(
+ description="The maximum number of requests allowed within the window.",
+ alias="limitValue",
+ )
+ value: StrictInt = Field(
+ description="The current number of requests made within the window."
+ )
+ window: StrictStr = Field(
+ description="The window of time in which the limitValue is enforced."
+ )
+ last_refill: datetime = Field(
+ description="The last time the rate limit was refilled.", alias="lastRefill"
+ )
+ __properties: ClassVar[List[str]] = [
+ "key",
+ "tenantId",
+ "limitValue",
+ "value",
+ "window",
+ "lastRefill",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of RateLimit from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of RateLimit from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "key": obj.get("key"),
+ "tenantId": obj.get("tenantId"),
+ "limitValue": obj.get("limitValue"),
+ "value": obj.get("value"),
+ "window": obj.get("window"),
+ "lastRefill": obj.get("lastRefill"),
+ }
+ )
+ return _obj
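
A sketch of reading one rate-limit row; the payload is invented, and lastRefill is coerced from the ISO string into a datetime.

from hatchet_sdk.clients.rest.models.rate_limit import RateLimit

rl = RateLimit.from_dict(
    {
        "key": "external-api",                                # invented example row
        "tenantId": "7e6cf1ba-3d9f-4a7e-9b3a-1f2e3d4c5b6a",
        "limitValue": 100,
        "value": 37,
        "window": "MINUTE",
        "lastRefill": "2024-01-01T00:00:00Z",
    }
)
assert rl is not None
print(rl.limit_value - rl.value, "requests left in window", rl.window)
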
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit_list.py
new file mode 100644
index 00000000..e9f2847d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit_list.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.rate_limit import RateLimit
+
+
+class RateLimitList(BaseModel):
+ """
+ RateLimitList
+ """ # noqa: E501
+
+ pagination: Optional[PaginationResponse] = None
+ rows: Optional[List[RateLimit]] = None
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of RateLimitList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of RateLimitList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [RateLimit.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit_order_by_direction.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit_order_by_direction.py
new file mode 100644
index 00000000..64451da9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit_order_by_direction.py
@@ -0,0 +1,37 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class RateLimitOrderByDirection(str, Enum):
+ """
+ RateLimitOrderByDirection
+ """
+
+ """
+ allowed enum values
+ """
+ ASC = "asc"
+ DESC = "desc"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of RateLimitOrderByDirection from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit_order_by_field.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit_order_by_field.py
new file mode 100644
index 00000000..6b5077be
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rate_limit_order_by_field.py
@@ -0,0 +1,38 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class RateLimitOrderByField(str, Enum):
+ """
+ RateLimitOrderByField
+ """
+
+ """
+ allowed enum values
+ """
+ KEY = "key"
+ VALUE = "value"
+ LIMITVALUE = "limitValue"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of RateLimitOrderByField from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/recent_step_runs.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/recent_step_runs.py
new file mode 100644
index 00000000..9b8a8249
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/recent_step_runs.py
@@ -0,0 +1,118 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.step_run_status import StepRunStatus
+
+
+class RecentStepRuns(BaseModel):
+ """
+ RecentStepRuns
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ action_id: StrictStr = Field(description="The action id.", alias="actionId")
+ status: StepRunStatus
+ started_at: Optional[datetime] = Field(default=None, alias="startedAt")
+ finished_at: Optional[datetime] = Field(default=None, alias="finishedAt")
+ cancelled_at: Optional[datetime] = Field(default=None, alias="cancelledAt")
+ workflow_run_id: StrictStr = Field(alias="workflowRunId")
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "actionId",
+ "status",
+ "startedAt",
+ "finishedAt",
+ "cancelledAt",
+ "workflowRunId",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of RecentStepRuns from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of RecentStepRuns from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "actionId": obj.get("actionId"),
+ "status": obj.get("status"),
+ "startedAt": obj.get("startedAt"),
+ "finishedAt": obj.get("finishedAt"),
+ "cancelledAt": obj.get("cancelledAt"),
+ "workflowRunId": obj.get("workflowRunId"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/reject_invite_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/reject_invite_request.py
new file mode 100644
index 00000000..13399345
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/reject_invite_request.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Annotated, Self
+
+
+class RejectInviteRequest(BaseModel):
+ """
+ RejectInviteRequest
+ """ # noqa: E501
+
+ invite: Annotated[str, Field(min_length=36, strict=True, max_length=36)]
+ __properties: ClassVar[List[str]] = ["invite"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of RejectInviteRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of RejectInviteRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"invite": obj.get("invite")})
+ return _obj
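
The invite field is pinned to exactly 36 characters (a UUID string), so anything else fails validation; a sketch using a made-up UUID.

from pydantic import ValidationError

from hatchet_sdk.clients.rest.models.reject_invite_request import RejectInviteRequest

req = RejectInviteRequest(invite="123e4567-e89b-12d3-a456-426614174000")  # 36 chars
print(req.to_json())

try:
    RejectInviteRequest(invite="not-a-uuid")
except ValidationError as err:
    print(err.error_count(), "validation error")  # too short for min_length=36
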
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/replay_event_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/replay_event_request.py
new file mode 100644
index 00000000..0a5ef723
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/replay_event_request.py
@@ -0,0 +1,85 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Annotated, Self
+
+
+class ReplayEventRequest(BaseModel):
+ """
+ ReplayEventRequest
+ """ # noqa: E501
+
+ event_ids: List[
+ Annotated[str, Field(min_length=36, strict=True, max_length=36)]
+ ] = Field(alias="eventIds")
+ __properties: ClassVar[List[str]] = ["eventIds"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of ReplayEventRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of ReplayEventRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"eventIds": obj.get("eventIds")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/replay_workflow_runs_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/replay_workflow_runs_request.py
new file mode 100644
index 00000000..de5b2797
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/replay_workflow_runs_request.py
@@ -0,0 +1,85 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Annotated, Self
+
+
+class ReplayWorkflowRunsRequest(BaseModel):
+ """
+ ReplayWorkflowRunsRequest
+ """ # noqa: E501
+
+ workflow_run_ids: List[
+ Annotated[str, Field(min_length=36, strict=True, max_length=36)]
+ ] = Field(alias="workflowRunIds")
+ __properties: ClassVar[List[str]] = ["workflowRunIds"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of ReplayWorkflowRunsRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of ReplayWorkflowRunsRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"workflowRunIds": obj.get("workflowRunIds")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/replay_workflow_runs_response.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/replay_workflow_runs_response.py
new file mode 100644
index 00000000..d8a9609d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/replay_workflow_runs_response.py
@@ -0,0 +1,100 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun
+
+
+class ReplayWorkflowRunsResponse(BaseModel):
+ """
+ ReplayWorkflowRunsResponse
+ """ # noqa: E501
+
+ workflow_runs: List[WorkflowRun] = Field(alias="workflowRuns")
+ __properties: ClassVar[List[str]] = ["workflowRuns"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of ReplayWorkflowRunsResponse from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of each item in workflow_runs (list)
+ _items = []
+ if self.workflow_runs:
+ for _item_workflow_runs in self.workflow_runs:
+ if _item_workflow_runs:
+ _items.append(_item_workflow_runs.to_dict())
+ _dict["workflowRuns"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of ReplayWorkflowRunsResponse from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "workflowRuns": (
+ [WorkflowRun.from_dict(_item) for _item in obj["workflowRuns"]]
+ if obj.get("workflowRuns") is not None
+ else None
+ )
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rerun_step_run_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rerun_step_run_request.py
new file mode 100644
index 00000000..f8b28066
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/rerun_step_run_request.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+
+class RerunStepRunRequest(BaseModel):
+ """
+ RerunStepRunRequest
+ """ # noqa: E501
+
+ input: Dict[str, Any]
+ __properties: ClassVar[List[str]] = ["input"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of RerunStepRunRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of RerunStepRunRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"input": obj.get("input")})
+ return _obj
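
The body is just an arbitrary JSON object handed back to the step as its new input; a minimal sketch with an invented payload.

from hatchet_sdk.clients.rest.models.rerun_step_run_request import RerunStepRunRequest

req = RerunStepRunRequest(input={"user_id": "abc123", "attempt": 2})  # invented input
print(req.to_json())  # {"input": {"user_id": "abc123", "attempt": 2}}
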
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/schedule_workflow_run_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/schedule_workflow_run_request.py
new file mode 100644
index 00000000..c5b4753f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/schedule_workflow_run_request.py
@@ -0,0 +1,92 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Self
+
+
+class ScheduleWorkflowRunRequest(BaseModel):
+ """
+ ScheduleWorkflowRunRequest
+ """ # noqa: E501
+
+ input: Dict[str, Any]
+ additional_metadata: Dict[str, Any] = Field(alias="additionalMetadata")
+ trigger_at: datetime = Field(alias="triggerAt")
+ __properties: ClassVar[List[str]] = ["input", "additionalMetadata", "triggerAt"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of ScheduleWorkflowRunRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of ScheduleWorkflowRunRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "input": obj.get("input"),
+ "additionalMetadata": obj.get("additionalMetadata"),
+ "triggerAt": obj.get("triggerAt"),
+ }
+ )
+ return _obj
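
All three fields above are required; a sketch that schedules a hypothetical run one hour out, using the snake_case names that populate_by_name permits.

from datetime import datetime, timedelta, timezone

from hatchet_sdk.clients.rest.models.schedule_workflow_run_request import (
    ScheduleWorkflowRunRequest,
)

req = ScheduleWorkflowRunRequest(
    input={"report": "daily"},                    # invented workflow input
    additional_metadata={"source": "example"},
    trigger_at=datetime.now(timezone.utc) + timedelta(hours=1),
)
print(req.to_dict()["triggerAt"])  # keys come back under their camelCase aliases
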
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_run_status.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_run_status.py
new file mode 100644
index 00000000..f0f2a17f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_run_status.py
@@ -0,0 +1,42 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class ScheduledRunStatus(str, Enum):
+ """
+ ScheduledRunStatus
+ """
+
+ """
+ allowed enum values
+ """
+ PENDING = "PENDING"
+ RUNNING = "RUNNING"
+ SUCCEEDED = "SUCCEEDED"
+ FAILED = "FAILED"
+ CANCELLED = "CANCELLED"
+ QUEUED = "QUEUED"
+ SCHEDULED = "SCHEDULED"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of ScheduledRunStatus from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows.py
new file mode 100644
index 00000000..95bf7169
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows.py
@@ -0,0 +1,149 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Annotated, Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.scheduled_workflows_method import (
+ ScheduledWorkflowsMethod,
+)
+from hatchet_sdk.clients.rest.models.workflow_run_status import WorkflowRunStatus
+
+
+class ScheduledWorkflows(BaseModel):
+ """
+ ScheduledWorkflows
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ tenant_id: StrictStr = Field(alias="tenantId")
+ workflow_version_id: StrictStr = Field(alias="workflowVersionId")
+ workflow_id: StrictStr = Field(alias="workflowId")
+ workflow_name: StrictStr = Field(alias="workflowName")
+ trigger_at: datetime = Field(alias="triggerAt")
+ input: Optional[Dict[str, Any]] = None
+ additional_metadata: Optional[Dict[str, Any]] = Field(
+ default=None, alias="additionalMetadata"
+ )
+ workflow_run_created_at: Optional[datetime] = Field(
+ default=None, alias="workflowRunCreatedAt"
+ )
+ workflow_run_name: Optional[StrictStr] = Field(
+ default=None, alias="workflowRunName"
+ )
+ workflow_run_status: Optional[WorkflowRunStatus] = Field(
+ default=None, alias="workflowRunStatus"
+ )
+ workflow_run_id: Optional[
+ Annotated[str, Field(min_length=36, strict=True, max_length=36)]
+ ] = Field(default=None, alias="workflowRunId")
+ method: ScheduledWorkflowsMethod
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "tenantId",
+ "workflowVersionId",
+ "workflowId",
+ "workflowName",
+ "triggerAt",
+ "input",
+ "additionalMetadata",
+ "workflowRunCreatedAt",
+ "workflowRunName",
+ "workflowRunStatus",
+ "workflowRunId",
+ "method",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of ScheduledWorkflows from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of ScheduledWorkflows from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "tenantId": obj.get("tenantId"),
+ "workflowVersionId": obj.get("workflowVersionId"),
+ "workflowId": obj.get("workflowId"),
+ "workflowName": obj.get("workflowName"),
+ "triggerAt": obj.get("triggerAt"),
+ "input": obj.get("input"),
+ "additionalMetadata": obj.get("additionalMetadata"),
+ "workflowRunCreatedAt": obj.get("workflowRunCreatedAt"),
+ "workflowRunName": obj.get("workflowRunName"),
+ "workflowRunStatus": obj.get("workflowRunStatus"),
+ "workflowRunId": obj.get("workflowRunId"),
+ "method": obj.get("method"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows_list.py
new file mode 100644
index 00000000..67468b5e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows_list.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.scheduled_workflows import ScheduledWorkflows
+
+
+class ScheduledWorkflowsList(BaseModel):
+ """
+ ScheduledWorkflowsList
+ """ # noqa: E501
+
+ rows: Optional[List[ScheduledWorkflows]] = None
+ pagination: Optional[PaginationResponse] = None
+ __properties: ClassVar[List[str]] = ["rows", "pagination"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of ScheduledWorkflowsList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of ScheduledWorkflowsList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "rows": (
+ [ScheduledWorkflows.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
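For orientation, every generated list model added in this diff follows the same from_json / to_dict round trip. A minimal sketch, assuming the vendored package is importable from this virtualenv; the empty payload below is illustrative, not taken from a real Hatchet response:

    from hatchet_sdk.clients.rest.models.scheduled_workflows_list import (
        ScheduledWorkflowsList,
    )

    # Both fields are Optional, so a bare "rows" key is enough to validate.
    wf_list = ScheduledWorkflowsList.from_json('{"rows": []}')
    assert wf_list is not None
    # Nested models (rows, pagination) are re-serialized through their own to_dict().
    print(wf_list.to_dict())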
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows_method.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows_method.py
new file mode 100644
index 00000000..f7016461
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows_method.py
@@ -0,0 +1,37 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class ScheduledWorkflowsMethod(str, Enum):
+ """
+ ScheduledWorkflowsMethod
+ """
+
+ """
+ allowed enum values
+ """
+ DEFAULT = "DEFAULT"
+ API = "API"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of ScheduledWorkflowsMethod from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py
new file mode 100644
index 00000000..0372abd3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/scheduled_workflows_order_by_field.py
@@ -0,0 +1,37 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class ScheduledWorkflowsOrderByField(str, Enum):
+ """
+ ScheduledWorkflowsOrderByField
+ """
+
+ """
+ allowed enum values
+ """
+ TRIGGERAT = "triggerAt"
+ CREATEDAT = "createdAt"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of ScheduledWorkflowsOrderByField from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/semaphore_slots.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/semaphore_slots.py
new file mode 100644
index 00000000..1e7c6242
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/semaphore_slots.py
@@ -0,0 +1,113 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.step_run_status import StepRunStatus
+
+
+class SemaphoreSlots(BaseModel):
+ """
+ SemaphoreSlots
+ """ # noqa: E501
+
+ step_run_id: StrictStr = Field(description="The step run id.", alias="stepRunId")
+ action_id: StrictStr = Field(description="The action id.", alias="actionId")
+ started_at: Optional[datetime] = Field(
+ default=None, description="The time this slot was started.", alias="startedAt"
+ )
+ timeout_at: Optional[datetime] = Field(
+ default=None, description="The time this slot will timeout.", alias="timeoutAt"
+ )
+ workflow_run_id: StrictStr = Field(
+ description="The workflow run id.", alias="workflowRunId"
+ )
+ status: StepRunStatus
+ __properties: ClassVar[List[str]] = [
+ "stepRunId",
+ "actionId",
+ "startedAt",
+ "timeoutAt",
+ "workflowRunId",
+ "status",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of SemaphoreSlots from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of SemaphoreSlots from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "stepRunId": obj.get("stepRunId"),
+ "actionId": obj.get("actionId"),
+ "startedAt": obj.get("startedAt"),
+ "timeoutAt": obj.get("timeoutAt"),
+ "workflowRunId": obj.get("workflowRunId"),
+ "status": obj.get("status"),
+ }
+ )
+ return _obj
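The to_dict contract documented in these models is easiest to see with a concrete call. A small sketch, assuming pydantic v2 is installed alongside the SDK; all identifier values below are made up for illustration:

    from hatchet_sdk.clients.rest.models.semaphore_slots import SemaphoreSlots
    from hatchet_sdk.clients.rest.models.step_run_status import StepRunStatus

    # populate_by_name=True means either the alias ("stepRunId") or the field
    # name ("step_run_id") is accepted when constructing the model.
    slot = SemaphoreSlots(
        stepRunId="step-123",
        actionId="my-action",
        workflowRunId="run-456",
        status=StepRunStatus.RUNNING,
    )
    # startedAt / timeoutAt were never set, so exclude_none drops them here.
    print(slot.to_dict())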
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/slack_webhook.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/slack_webhook.py
new file mode 100644
index 00000000..6cc3f4c8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/slack_webhook.py
@@ -0,0 +1,127 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+
+
+class SlackWebhook(BaseModel):
+ """
+ SlackWebhook
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ tenant_id: StrictStr = Field(
+ description="The unique identifier for the tenant that the Slack webhook belongs to.",
+ alias="tenantId",
+ )
+ team_name: StrictStr = Field(
+ description="The team name associated with this slack webhook.",
+ alias="teamName",
+ )
+ team_id: StrictStr = Field(
+ description="The team id associated with this slack webhook.", alias="teamId"
+ )
+ channel_name: StrictStr = Field(
+ description="The channel name associated with this slack webhook.",
+ alias="channelName",
+ )
+ channel_id: StrictStr = Field(
+ description="The channel id associated with this slack webhook.",
+ alias="channelId",
+ )
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "tenantId",
+ "teamName",
+ "teamId",
+ "channelName",
+ "channelId",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of SlackWebhook from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of SlackWebhook from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "tenantId": obj.get("tenantId"),
+ "teamName": obj.get("teamName"),
+ "teamId": obj.get("teamId"),
+ "channelName": obj.get("channelName"),
+ "channelId": obj.get("channelId"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/sns_integration.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/sns_integration.py
new file mode 100644
index 00000000..7fcda4aa
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/sns_integration.py
@@ -0,0 +1,114 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+
+
+class SNSIntegration(BaseModel):
+ """
+ SNSIntegration
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ tenant_id: StrictStr = Field(
+ description="The unique identifier for the tenant that the SNS integration belongs to.",
+ alias="tenantId",
+ )
+ topic_arn: StrictStr = Field(
+ description="The Amazon Resource Name (ARN) of the SNS topic.", alias="topicArn"
+ )
+ ingest_url: Optional[StrictStr] = Field(
+ default=None, description="The URL to send SNS messages to.", alias="ingestUrl"
+ )
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "tenantId",
+ "topicArn",
+ "ingestUrl",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of SNSIntegration from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of SNSIntegration from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "tenantId": obj.get("tenantId"),
+ "topicArn": obj.get("topicArn"),
+ "ingestUrl": obj.get("ingestUrl"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step.py
new file mode 100644
index 00000000..2014b7e9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step.py
@@ -0,0 +1,123 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+
+
+class Step(BaseModel):
+ """
+ Step
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ readable_id: StrictStr = Field(
+ description="The readable id of the step.", alias="readableId"
+ )
+ tenant_id: StrictStr = Field(alias="tenantId")
+ job_id: StrictStr = Field(alias="jobId")
+ action: StrictStr
+ timeout: Optional[StrictStr] = Field(
+ default=None, description="The timeout of the step."
+ )
+ children: Optional[List[StrictStr]] = None
+ parents: Optional[List[StrictStr]] = None
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "readableId",
+ "tenantId",
+ "jobId",
+ "action",
+ "timeout",
+ "children",
+ "parents",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of Step from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of Step from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "readableId": obj.get("readableId"),
+ "tenantId": obj.get("tenantId"),
+ "jobId": obj.get("jobId"),
+ "action": obj.get("action"),
+ "timeout": obj.get("timeout"),
+ "children": obj.get("children"),
+ "parents": obj.get("parents"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run.py
new file mode 100644
index 00000000..7e6b4b22
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run.py
@@ -0,0 +1,202 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.step import Step
+from hatchet_sdk.clients.rest.models.step_run_status import StepRunStatus
+
+
+class StepRun(BaseModel):
+ """
+ StepRun
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ tenant_id: StrictStr = Field(alias="tenantId")
+ job_run_id: StrictStr = Field(alias="jobRunId")
+ job_run: Optional[JobRun] = Field(default=None, alias="jobRun")
+ step_id: StrictStr = Field(alias="stepId")
+ step: Optional[Step] = None
+ child_workflows_count: Optional[StrictInt] = Field(
+ default=None, alias="childWorkflowsCount"
+ )
+ parents: Optional[List[StrictStr]] = None
+ child_workflow_runs: Optional[List[StrictStr]] = Field(
+ default=None, alias="childWorkflowRuns"
+ )
+ worker_id: Optional[StrictStr] = Field(default=None, alias="workerId")
+ input: Optional[StrictStr] = None
+ output: Optional[StrictStr] = None
+ status: StepRunStatus
+ requeue_after: Optional[datetime] = Field(default=None, alias="requeueAfter")
+ result: Optional[Dict[str, Any]] = None
+ error: Optional[StrictStr] = None
+ started_at: Optional[datetime] = Field(default=None, alias="startedAt")
+ started_at_epoch: Optional[StrictInt] = Field(default=None, alias="startedAtEpoch")
+ finished_at: Optional[datetime] = Field(default=None, alias="finishedAt")
+ finished_at_epoch: Optional[StrictInt] = Field(
+ default=None, alias="finishedAtEpoch"
+ )
+ timeout_at: Optional[datetime] = Field(default=None, alias="timeoutAt")
+ timeout_at_epoch: Optional[StrictInt] = Field(default=None, alias="timeoutAtEpoch")
+ cancelled_at: Optional[datetime] = Field(default=None, alias="cancelledAt")
+ cancelled_at_epoch: Optional[StrictInt] = Field(
+ default=None, alias="cancelledAtEpoch"
+ )
+ cancelled_reason: Optional[StrictStr] = Field(default=None, alias="cancelledReason")
+ cancelled_error: Optional[StrictStr] = Field(default=None, alias="cancelledError")
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "tenantId",
+ "jobRunId",
+ "jobRun",
+ "stepId",
+ "step",
+ "childWorkflowsCount",
+ "parents",
+ "childWorkflowRuns",
+ "workerId",
+ "input",
+ "output",
+ "status",
+ "requeueAfter",
+ "result",
+ "error",
+ "startedAt",
+ "startedAtEpoch",
+ "finishedAt",
+ "finishedAtEpoch",
+ "timeoutAt",
+ "timeoutAtEpoch",
+ "cancelledAt",
+ "cancelledAtEpoch",
+ "cancelledReason",
+ "cancelledError",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of StepRun from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of job_run
+ if self.job_run:
+ _dict["jobRun"] = self.job_run.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of step
+ if self.step:
+ _dict["step"] = self.step.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of StepRun from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "tenantId": obj.get("tenantId"),
+ "jobRunId": obj.get("jobRunId"),
+ "jobRun": (
+ JobRun.from_dict(obj["jobRun"])
+ if obj.get("jobRun") is not None
+ else None
+ ),
+ "stepId": obj.get("stepId"),
+ "step": (
+ Step.from_dict(obj["step"]) if obj.get("step") is not None else None
+ ),
+ "childWorkflowsCount": obj.get("childWorkflowsCount"),
+ "parents": obj.get("parents"),
+ "childWorkflowRuns": obj.get("childWorkflowRuns"),
+ "workerId": obj.get("workerId"),
+ "input": obj.get("input"),
+ "output": obj.get("output"),
+ "status": obj.get("status"),
+ "requeueAfter": obj.get("requeueAfter"),
+ "result": obj.get("result"),
+ "error": obj.get("error"),
+ "startedAt": obj.get("startedAt"),
+ "startedAtEpoch": obj.get("startedAtEpoch"),
+ "finishedAt": obj.get("finishedAt"),
+ "finishedAtEpoch": obj.get("finishedAtEpoch"),
+ "timeoutAt": obj.get("timeoutAt"),
+ "timeoutAtEpoch": obj.get("timeoutAtEpoch"),
+ "cancelledAt": obj.get("cancelledAt"),
+ "cancelledAtEpoch": obj.get("cancelledAtEpoch"),
+ "cancelledReason": obj.get("cancelledReason"),
+ "cancelledError": obj.get("cancelledError"),
+ }
+ )
+ return _obj
+
+
+from hatchet_sdk.clients.rest.models.job_run import JobRun
+
+# TODO: Rewrite to not use raise_errors
+StepRun.model_rebuild(raise_errors=False)
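The tail of step_run.py is the generator's standard workaround for the StepRun <-> JobRun import cycle: with `from __future__ import annotations`, `Optional[JobRun]` is only a string at class-creation time, and the late import plus model_rebuild(raise_errors=False) resolves it once JobRun exists. A self-contained sketch of the same pattern (the class names here are hypothetical, not part of the SDK):

    from __future__ import annotations

    from typing import Optional

    from pydantic import BaseModel


    class Parent(BaseModel):
        # "Child" is not defined yet; the annotation stays a plain string for now.
        child: Optional[Child] = None


    class Child(BaseModel):
        name: str


    # Resolve the forward reference after Child exists, mirroring what the
    # generated module does for JobRun.
    Parent.model_rebuild(raise_errors=False)

    print(Parent(child=Child(name="example")).child)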
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_archive.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_archive.py
new file mode 100644
index 00000000..7476174d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_archive.py
@@ -0,0 +1,142 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
+from typing_extensions import Self
+
+
+class StepRunArchive(BaseModel):
+ """
+ StepRunArchive
+ """ # noqa: E501
+
+ step_run_id: StrictStr = Field(alias="stepRunId")
+ order: StrictInt
+ input: Optional[StrictStr] = None
+ output: Optional[StrictStr] = None
+ started_at: Optional[datetime] = Field(default=None, alias="startedAt")
+ error: Optional[StrictStr] = None
+ retry_count: StrictInt = Field(alias="retryCount")
+ created_at: datetime = Field(alias="createdAt")
+ started_at_epoch: Optional[StrictInt] = Field(default=None, alias="startedAtEpoch")
+ finished_at: Optional[datetime] = Field(default=None, alias="finishedAt")
+ finished_at_epoch: Optional[StrictInt] = Field(
+ default=None, alias="finishedAtEpoch"
+ )
+ timeout_at: Optional[datetime] = Field(default=None, alias="timeoutAt")
+ timeout_at_epoch: Optional[StrictInt] = Field(default=None, alias="timeoutAtEpoch")
+ cancelled_at: Optional[datetime] = Field(default=None, alias="cancelledAt")
+ cancelled_at_epoch: Optional[StrictInt] = Field(
+ default=None, alias="cancelledAtEpoch"
+ )
+ cancelled_reason: Optional[StrictStr] = Field(default=None, alias="cancelledReason")
+ cancelled_error: Optional[StrictStr] = Field(default=None, alias="cancelledError")
+ __properties: ClassVar[List[str]] = [
+ "stepRunId",
+ "order",
+ "input",
+ "output",
+ "startedAt",
+ "error",
+ "retryCount",
+ "createdAt",
+ "startedAtEpoch",
+ "finishedAt",
+ "finishedAtEpoch",
+ "timeoutAt",
+ "timeoutAtEpoch",
+ "cancelledAt",
+ "cancelledAtEpoch",
+ "cancelledReason",
+ "cancelledError",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of StepRunArchive from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of StepRunArchive from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "stepRunId": obj.get("stepRunId"),
+ "order": obj.get("order"),
+ "input": obj.get("input"),
+ "output": obj.get("output"),
+ "startedAt": obj.get("startedAt"),
+ "error": obj.get("error"),
+ "retryCount": obj.get("retryCount"),
+ "createdAt": obj.get("createdAt"),
+ "startedAtEpoch": obj.get("startedAtEpoch"),
+ "finishedAt": obj.get("finishedAt"),
+ "finishedAtEpoch": obj.get("finishedAtEpoch"),
+ "timeoutAt": obj.get("timeoutAt"),
+ "timeoutAtEpoch": obj.get("timeoutAtEpoch"),
+ "cancelledAt": obj.get("cancelledAt"),
+ "cancelledAtEpoch": obj.get("cancelledAtEpoch"),
+ "cancelledReason": obj.get("cancelledReason"),
+ "cancelledError": obj.get("cancelledError"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_archive_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_archive_list.py
new file mode 100644
index 00000000..eb4bcef2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_archive_list.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.step_run_archive import StepRunArchive
+
+
+class StepRunArchiveList(BaseModel):
+ """
+ StepRunArchiveList
+ """ # noqa: E501
+
+ pagination: Optional[PaginationResponse] = None
+ rows: Optional[List[StepRunArchive]] = None
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of StepRunArchiveList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of StepRunArchiveList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [StepRunArchive.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_diff.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_diff.py
new file mode 100644
index 00000000..78848dd7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_diff.py
@@ -0,0 +1,91 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, StrictStr
+from typing_extensions import Self
+
+
+class StepRunDiff(BaseModel):
+ """
+ StepRunDiff
+ """ # noqa: E501
+
+ key: StrictStr
+ original: StrictStr
+ modified: StrictStr
+ __properties: ClassVar[List[str]] = ["key", "original", "modified"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of StepRunDiff from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of StepRunDiff from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "key": obj.get("key"),
+ "original": obj.get("original"),
+ "modified": obj.get("modified"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event.py
new file mode 100644
index 00000000..c5909d24
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event.py
@@ -0,0 +1,120 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.step_run_event_reason import StepRunEventReason
+from hatchet_sdk.clients.rest.models.step_run_event_severity import StepRunEventSeverity
+
+
+class StepRunEvent(BaseModel):
+ """
+ StepRunEvent
+ """ # noqa: E501
+
+ id: StrictInt
+ time_first_seen: datetime = Field(alias="timeFirstSeen")
+ time_last_seen: datetime = Field(alias="timeLastSeen")
+ step_run_id: Optional[StrictStr] = Field(default=None, alias="stepRunId")
+ workflow_run_id: Optional[StrictStr] = Field(default=None, alias="workflowRunId")
+ reason: StepRunEventReason
+ severity: StepRunEventSeverity
+ message: StrictStr
+ count: StrictInt
+ data: Optional[Dict[str, Any]] = None
+ __properties: ClassVar[List[str]] = [
+ "id",
+ "timeFirstSeen",
+ "timeLastSeen",
+ "stepRunId",
+ "workflowRunId",
+ "reason",
+ "severity",
+ "message",
+ "count",
+ "data",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of StepRunEvent from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of StepRunEvent from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "id": obj.get("id"),
+ "timeFirstSeen": obj.get("timeFirstSeen"),
+ "timeLastSeen": obj.get("timeLastSeen"),
+ "stepRunId": obj.get("stepRunId"),
+ "workflowRunId": obj.get("workflowRunId"),
+ "reason": obj.get("reason"),
+ "severity": obj.get("severity"),
+ "message": obj.get("message"),
+ "count": obj.get("count"),
+ "data": obj.get("data"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event_list.py
new file mode 100644
index 00000000..f146eb8e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event_list.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.step_run_event import StepRunEvent
+
+
+class StepRunEventList(BaseModel):
+ """
+ StepRunEventList
+ """ # noqa: E501
+
+ pagination: Optional[PaginationResponse] = None
+ rows: Optional[List[StepRunEvent]] = None
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of StepRunEventList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of StepRunEventList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [StepRunEvent.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event_reason.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event_reason.py
new file mode 100644
index 00000000..487fde06
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event_reason.py
@@ -0,0 +1,52 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class StepRunEventReason(str, Enum):
+ """
+ StepRunEventReason
+ """
+
+ """
+ allowed enum values
+ """
+ REQUEUED_NO_WORKER = "REQUEUED_NO_WORKER"
+ REQUEUED_RATE_LIMIT = "REQUEUED_RATE_LIMIT"
+ SCHEDULING_TIMED_OUT = "SCHEDULING_TIMED_OUT"
+ ASSIGNED = "ASSIGNED"
+ STARTED = "STARTED"
+ ACKNOWLEDGED = "ACKNOWLEDGED"
+ FINISHED = "FINISHED"
+ FAILED = "FAILED"
+ RETRYING = "RETRYING"
+ CANCELLED = "CANCELLED"
+ TIMEOUT_REFRESHED = "TIMEOUT_REFRESHED"
+ REASSIGNED = "REASSIGNED"
+ TIMED_OUT = "TIMED_OUT"
+ SLOT_RELEASED = "SLOT_RELEASED"
+ RETRIED_BY_USER = "RETRIED_BY_USER"
+ WORKFLOW_RUN_GROUP_KEY_SUCCEEDED = "WORKFLOW_RUN_GROUP_KEY_SUCCEEDED"
+ WORKFLOW_RUN_GROUP_KEY_FAILED = "WORKFLOW_RUN_GROUP_KEY_FAILED"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of StepRunEventReason from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event_severity.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event_severity.py
new file mode 100644
index 00000000..a8a39912
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_event_severity.py
@@ -0,0 +1,38 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class StepRunEventSeverity(str, Enum):
+ """
+ StepRunEventSeverity
+ """
+
+ """
+ allowed enum values
+ """
+ INFO = "INFO"
+ WARNING = "WARNING"
+ CRITICAL = "CRITICAL"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of StepRunEventSeverity from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_status.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_status.py
new file mode 100644
index 00000000..1e1114f0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/step_run_status.py
@@ -0,0 +1,44 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class StepRunStatus(str, Enum):
+ """
+ StepRunStatus
+ """
+
+ """
+ allowed enum values
+ """
+ PENDING = "PENDING"
+ PENDING_ASSIGNMENT = "PENDING_ASSIGNMENT"
+ ASSIGNED = "ASSIGNED"
+ RUNNING = "RUNNING"
+ SUCCEEDED = "SUCCEEDED"
+ FAILED = "FAILED"
+ CANCELLED = "CANCELLED"
+ CANCELLING = "CANCELLING"
+ BACKOFF = "BACKOFF"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of StepRunStatus from a JSON string"""
+ return cls(json.loads(json_str))
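One small gotcha shared by these generated enums: from_json expects a JSON-encoded string, so the quotes are part of the input, while plain construction from the bare value also works because each enum subclasses str. A quick sketch:

    from hatchet_sdk.clients.rest.models.step_run_status import StepRunStatus

    # json.loads('"RUNNING"') yields "RUNNING", which maps to the enum member.
    assert StepRunStatus.from_json('"RUNNING"') is StepRunStatus.RUNNING
    assert StepRunStatus("RUNNING") is StepRunStatus.RUNNING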
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant.py
new file mode 100644
index 00000000..97a5863e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant.py
@@ -0,0 +1,118 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+
+
+class Tenant(BaseModel):
+ """
+ Tenant
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ name: StrictStr = Field(description="The name of the tenant.")
+ slug: StrictStr = Field(description="The slug of the tenant.")
+ analytics_opt_out: Optional[StrictBool] = Field(
+ default=None,
+ description="Whether the tenant has opted out of analytics.",
+ alias="analyticsOptOut",
+ )
+ alert_member_emails: Optional[StrictBool] = Field(
+ default=None,
+ description="Whether to alert tenant members.",
+ alias="alertMemberEmails",
+ )
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "name",
+ "slug",
+ "analyticsOptOut",
+ "alertMemberEmails",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of Tenant from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of Tenant from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "name": obj.get("name"),
+ "slug": obj.get("slug"),
+ "analyticsOptOut": obj.get("analyticsOptOut"),
+ "alertMemberEmails": obj.get("alertMemberEmails"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_alert_email_group.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_alert_email_group.py
new file mode 100644
index 00000000..2b0586ed
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_alert_email_group.py
@@ -0,0 +1,98 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+
+
+class TenantAlertEmailGroup(BaseModel):
+ """
+ TenantAlertEmailGroup
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ emails: List[StrictStr] = Field(description="A list of user email addresses.")
+ __properties: ClassVar[List[str]] = ["metadata", "emails"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of TenantAlertEmailGroup from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of TenantAlertEmailGroup from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "emails": obj.get("emails"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_alert_email_group_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_alert_email_group_list.py
new file mode 100644
index 00000000..73d67df4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_alert_email_group_list.py
@@ -0,0 +1,112 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.tenant_alert_email_group import (
+ TenantAlertEmailGroup,
+)
+
+
+class TenantAlertEmailGroupList(BaseModel):
+ """
+ TenantAlertEmailGroupList
+ """ # noqa: E501
+
+ pagination: Optional[PaginationResponse] = None
+ rows: Optional[List[TenantAlertEmailGroup]] = None
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of TenantAlertEmailGroupList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of TenantAlertEmailGroupList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [TenantAlertEmailGroup.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_alerting_settings.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_alerting_settings.py
new file mode 100644
index 00000000..e2502486
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_alerting_settings.py
@@ -0,0 +1,143 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+
+
+class TenantAlertingSettings(BaseModel):
+ """
+ TenantAlertingSettings
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ alert_member_emails: Optional[StrictBool] = Field(
+ default=None,
+ description="Whether to alert tenant members.",
+ alias="alertMemberEmails",
+ )
+ enable_workflow_run_failure_alerts: Optional[StrictBool] = Field(
+ default=None,
+ description="Whether to send alerts when workflow runs fail.",
+ alias="enableWorkflowRunFailureAlerts",
+ )
+ enable_expiring_token_alerts: Optional[StrictBool] = Field(
+ default=None,
+ description="Whether to enable alerts when tokens are approaching expiration.",
+ alias="enableExpiringTokenAlerts",
+ )
+ enable_tenant_resource_limit_alerts: Optional[StrictBool] = Field(
+ default=None,
+ description="Whether to enable alerts when tenant resources are approaching limits.",
+ alias="enableTenantResourceLimitAlerts",
+ )
+ max_alerting_frequency: StrictStr = Field(
+ description="The max frequency at which to alert.", alias="maxAlertingFrequency"
+ )
+ last_alerted_at: Optional[datetime] = Field(
+ default=None,
+ description="The last time an alert was sent.",
+ alias="lastAlertedAt",
+ )
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "alertMemberEmails",
+ "enableWorkflowRunFailureAlerts",
+ "enableExpiringTokenAlerts",
+ "enableTenantResourceLimitAlerts",
+ "maxAlertingFrequency",
+ "lastAlertedAt",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of TenantAlertingSettings from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of TenantAlertingSettings from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "alertMemberEmails": obj.get("alertMemberEmails"),
+ "enableWorkflowRunFailureAlerts": obj.get(
+ "enableWorkflowRunFailureAlerts"
+ ),
+ "enableExpiringTokenAlerts": obj.get("enableExpiringTokenAlerts"),
+ "enableTenantResourceLimitAlerts": obj.get(
+ "enableTenantResourceLimitAlerts"
+ ),
+ "maxAlertingFrequency": obj.get("maxAlertingFrequency"),
+ "lastAlertedAt": obj.get("lastAlertedAt"),
+ }
+ )
+ return _obj
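
The TenantAlertingSettings model above pairs camelCase wire aliases with snake_case attributes and nests an APIResourceMeta under metadata. A minimal round-trip sketch follows (not part of the diff); it assumes the vendored hatchet_sdk package is importable and that APIResourceMeta accepts an id/createdAt/updatedAt payload, which should be checked against api_resource_meta.py since that file sits outside this hunk.

from hatchet_sdk.clients.rest.models.tenant_alerting_settings import (
    TenantAlertingSettings,
)

payload = {
    # Assumed APIResourceMeta shape (id/createdAt/updatedAt) -- verify against api_resource_meta.py.
    "metadata": {
        "id": "example-id",
        "createdAt": "2024-01-01T00:00:00Z",
        "updatedAt": "2024-01-01T00:00:00Z",
    },
    "maxAlertingFrequency": "1h",
    "enableExpiringTokenAlerts": True,
}

settings = TenantAlertingSettings.from_dict(payload)
if settings is not None:
    print(settings.max_alerting_frequency)  # snake_case attribute access -> "1h"
    # to_dict() re-serializes with camelCase aliases, drops unset optionals
    # (e.g. lastAlertedAt) via exclude_none, and nests metadata.to_dict().
    print(settings.to_dict())
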
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_invite.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_invite.py
new file mode 100644
index 00000000..168bfa3c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_invite.py
@@ -0,0 +1,120 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.tenant_member_role import TenantMemberRole
+
+
+class TenantInvite(BaseModel):
+ """
+ TenantInvite
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ email: StrictStr = Field(description="The email of the user to invite.")
+ role: TenantMemberRole = Field(description="The role of the user in the tenant.")
+ tenant_id: StrictStr = Field(
+ description="The tenant id associated with this tenant invite.",
+ alias="tenantId",
+ )
+    tenant_name: Optional[StrictStr] = Field(
+        default=None,
+        description="The name of the tenant associated with this invite.",
+        alias="tenantName",
+    )
+ expires: datetime = Field(description="The time that this invite expires.")
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "email",
+ "role",
+ "tenantId",
+ "tenantName",
+ "expires",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of TenantInvite from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of TenantInvite from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "email": obj.get("email"),
+ "role": obj.get("role"),
+ "tenantId": obj.get("tenantId"),
+ "tenantName": obj.get("tenantName"),
+ "expires": obj.get("expires"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_invite_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_invite_list.py
new file mode 100644
index 00000000..0ed078ef
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_invite_list.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.tenant_invite import TenantInvite
+
+
+class TenantInviteList(BaseModel):
+ """
+ TenantInviteList
+ """ # noqa: E501
+
+ pagination: Optional[PaginationResponse] = None
+ rows: Optional[List[TenantInvite]] = None
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of TenantInviteList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of TenantInviteList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [TenantInvite.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_list.py
new file mode 100644
index 00000000..2dbb320e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_list.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.tenant import Tenant
+
+
+class TenantList(BaseModel):
+ """
+ TenantList
+ """ # noqa: E501
+
+ pagination: Optional[PaginationResponse] = None
+ rows: Optional[List[Tenant]] = None
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of TenantList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of TenantList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [Tenant.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
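
TenantList follows the same pagination/rows wrapper pattern as TenantAlertEmailGroupList, TenantInviteList, and TenantMemberList. A small sketch (not part of the diff) of the None-safe from_dict/to_json behaviour, assuming hatchet_sdk is importable:

from hatchet_sdk.clients.rest.models.tenant_list import TenantList

print(TenantList.from_dict(None))    # -> None; from_dict is None-safe
empty = TenantList.from_dict({})     # missing keys simply stay None
print(empty.pagination, empty.rows)  # -> None None
print(empty.to_json())               # -> {} ; exclude_none drops unset fields
# A populated payload would run each "rows" element through Tenant.from_dict()
# and "pagination" through PaginationResponse.from_dict(), mirroring to_dict().
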
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_member.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_member.py
new file mode 100644
index 00000000..540230c0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_member.py
@@ -0,0 +1,123 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.tenant import Tenant
+from hatchet_sdk.clients.rest.models.tenant_member_role import TenantMemberRole
+from hatchet_sdk.clients.rest.models.user_tenant_public import UserTenantPublic
+
+
+class TenantMember(BaseModel):
+ """
+ TenantMember
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ user: UserTenantPublic = Field(
+ description="The user associated with this tenant member."
+ )
+ role: TenantMemberRole = Field(description="The role of the user in the tenant.")
+ tenant: Optional[Tenant] = Field(
+ default=None, description="The tenant associated with this tenant member."
+ )
+ __properties: ClassVar[List[str]] = ["metadata", "user", "role", "tenant"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of TenantMember from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of user
+ if self.user:
+ _dict["user"] = self.user.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of tenant
+ if self.tenant:
+ _dict["tenant"] = self.tenant.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of TenantMember from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "user": (
+ UserTenantPublic.from_dict(obj["user"])
+ if obj.get("user") is not None
+ else None
+ ),
+ "role": obj.get("role"),
+ "tenant": (
+ Tenant.from_dict(obj["tenant"])
+ if obj.get("tenant") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_member_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_member_list.py
new file mode 100644
index 00000000..5aabdcd1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_member_list.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.tenant_member import TenantMember
+
+
+class TenantMemberList(BaseModel):
+ """
+ TenantMemberList
+ """ # noqa: E501
+
+ pagination: Optional[PaginationResponse] = None
+ rows: Optional[List[TenantMember]] = None
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of TenantMemberList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of TenantMemberList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [TenantMember.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_member_role.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_member_role.py
new file mode 100644
index 00000000..446c7044
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_member_role.py
@@ -0,0 +1,38 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class TenantMemberRole(str, Enum):
+ """
+ TenantMemberRole
+ """
+
+ """
+ allowed enum values
+ """
+ OWNER = "OWNER"
+ ADMIN = "ADMIN"
+ MEMBER = "MEMBER"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of TenantMemberRole from a JSON string"""
+ return cls(json.loads(json_str))
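
Since TenantMemberRole subclasses both str and Enum, its members behave as plain strings on the wire. A short sketch (not part of the diff):

from hatchet_sdk.clients.rest.models.tenant_member_role import TenantMemberRole

role = TenantMemberRole.from_json('"ADMIN"')                 # JSON string -> enum member
print(role is TenantMemberRole.ADMIN)                        # -> True
print(role.value)                                            # -> ADMIN
print(TenantMemberRole("OWNER") is TenantMemberRole.OWNER)   # lookup by value -> True
print("MEMBER" == TenantMemberRole.MEMBER)                   # str mix-in compares by value -> True
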
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_queue_metrics.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_queue_metrics.py
new file mode 100644
index 00000000..4043d47f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_queue_metrics.py
@@ -0,0 +1,116 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.queue_metrics import QueueMetrics
+
+
+class TenantQueueMetrics(BaseModel):
+ """
+ TenantQueueMetrics
+ """ # noqa: E501
+
+ total: Optional[QueueMetrics] = Field(
+ default=None, description="The total queue metrics."
+ )
+ workflow: Optional[Dict[str, QueueMetrics]] = None
+ queues: Optional[Dict[str, StrictInt]] = None
+ __properties: ClassVar[List[str]] = ["total", "workflow", "queues"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of TenantQueueMetrics from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of total
+ if self.total:
+ _dict["total"] = self.total.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each value in workflow (dict)
+ _field_dict = {}
+ if self.workflow:
+ for _key_workflow in self.workflow:
+ if self.workflow[_key_workflow]:
+ _field_dict[_key_workflow] = self.workflow[_key_workflow].to_dict()
+ _dict["workflow"] = _field_dict
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of TenantQueueMetrics from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "total": (
+ QueueMetrics.from_dict(obj["total"])
+ if obj.get("total") is not None
+ else None
+ ),
+ "workflow": (
+ dict(
+ (_k, QueueMetrics.from_dict(_v))
+ for _k, _v in obj["workflow"].items()
+ )
+ if obj.get("workflow") is not None
+ else None
+ ),
+ "queues": obj.get("queues"),
+ }
+ )
+ return _obj
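
A sketch of TenantQueueMetrics (not part of the diff) that exercises only the flat queues counters; the total and workflow fields would be parsed through QueueMetrics.from_dict(), whose schema lives in queue_metrics.py outside this hunk:

from hatchet_sdk.clients.rest.models.tenant_queue_metrics import TenantQueueMetrics

metrics = TenantQueueMetrics.from_dict({"queues": {"default": 12, "critical": 0}})
print(metrics.queues["default"])  # -> 12
print(metrics.to_dict())          # -> {'queues': {'default': 12, 'critical': 0}}
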
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_resource.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_resource.py
new file mode 100644
index 00000000..0dbdbf60
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_resource.py
@@ -0,0 +1,40 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class TenantResource(str, Enum):
+ """
+ TenantResource
+ """
+
+ """
+ allowed enum values
+ """
+ WORKER = "WORKER"
+ EVENT = "EVENT"
+ WORKFLOW_RUN = "WORKFLOW_RUN"
+ CRON = "CRON"
+ SCHEDULE = "SCHEDULE"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of TenantResource from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_resource_limit.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_resource_limit.py
new file mode 100644
index 00000000..722b7854
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_resource_limit.py
@@ -0,0 +1,135 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.tenant_resource import TenantResource
+
+
+class TenantResourceLimit(BaseModel):
+ """
+ TenantResourceLimit
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ resource: TenantResource = Field(
+ description="The resource associated with this limit."
+ )
+    limit_value: StrictInt = Field(
+        description="The limit value for this resource.", alias="limitValue"
+    )
+    alarm_value: Optional[StrictInt] = Field(
+        default=None,
+        description="The alarm threshold used to warn when usage approaches the limit value.",
+        alias="alarmValue",
+    )
+    value: StrictInt = Field(
+        description="The current value associated with this limit."
+    )
+    window: Optional[StrictStr] = Field(
+        default=None,
+        description="The meter window for the limit (e.g. 1 day, 1 week, 1 month).",
+    )
+ last_refill: Optional[datetime] = Field(
+ default=None,
+ description="The last time the limit was refilled.",
+ alias="lastRefill",
+ )
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "resource",
+ "limitValue",
+ "alarmValue",
+ "value",
+ "window",
+ "lastRefill",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of TenantResourceLimit from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of TenantResourceLimit from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "resource": obj.get("resource"),
+ "limitValue": obj.get("limitValue"),
+ "alarmValue": obj.get("alarmValue"),
+ "value": obj.get("value"),
+ "window": obj.get("window"),
+ "lastRefill": obj.get("lastRefill"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_resource_policy.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_resource_policy.py
new file mode 100644
index 00000000..b9e5181f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_resource_policy.py
@@ -0,0 +1,102 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.tenant_resource_limit import TenantResourceLimit
+
+
+class TenantResourcePolicy(BaseModel):
+ """
+ TenantResourcePolicy
+ """ # noqa: E501
+
+ limits: List[TenantResourceLimit] = Field(
+ description="A list of resource limits for the tenant."
+ )
+ __properties: ClassVar[List[str]] = ["limits"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of TenantResourcePolicy from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of each item in limits (list)
+ _items = []
+ if self.limits:
+ for _item_limits in self.limits:
+ if _item_limits:
+ _items.append(_item_limits.to_dict())
+ _dict["limits"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of TenantResourcePolicy from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "limits": (
+ [TenantResourceLimit.from_dict(_item) for _item in obj["limits"]]
+ if obj.get("limits") is not None
+ else None
+ )
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py
new file mode 100644
index 00000000..4b9bfc81
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, StrictInt
+from typing_extensions import Self
+
+
+class TenantStepRunQueueMetrics(BaseModel):
+ """
+ TenantStepRunQueueMetrics
+ """ # noqa: E501
+
+ queues: Optional[Dict[str, StrictInt]] = None
+ __properties: ClassVar[List[str]] = ["queues"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of TenantStepRunQueueMetrics from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of TenantStepRunQueueMetrics from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"queues": obj.get("queues")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/trigger_workflow_run_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/trigger_workflow_run_request.py
new file mode 100644
index 00000000..5600c6a0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/trigger_workflow_run_request.py
@@ -0,0 +1,91 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Self
+
+
+class TriggerWorkflowRunRequest(BaseModel):
+ """
+ TriggerWorkflowRunRequest
+ """ # noqa: E501
+
+ input: Dict[str, Any]
+ additional_metadata: Optional[Dict[str, Any]] = Field(
+ default=None, alias="additionalMetadata"
+ )
+ __properties: ClassVar[List[str]] = ["input", "additionalMetadata"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of TriggerWorkflowRunRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of TriggerWorkflowRunRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "input": obj.get("input"),
+ "additionalMetadata": obj.get("additionalMetadata"),
+ }
+ )
+ return _obj
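
TriggerWorkflowRunRequest is the request body for triggering a workflow run; populate_by_name=True means either the Python field name or the camelCase alias may be used when constructing it. A sketch (not part of the diff), with purely illustrative values:

from hatchet_sdk.clients.rest.models.trigger_workflow_run_request import (
    TriggerWorkflowRunRequest,
)

req = TriggerWorkflowRunRequest(
    input={"user_id": 42, "dry_run": False},
    additional_metadata={"source": "nightly-batch"},  # alias "additionalMetadata" works too
)
print(req.to_json())
# -> {"input": {"user_id": 42, "dry_run": false}, "additionalMetadata": {"source": "nightly-batch"}}
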
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_tenant_alert_email_group_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_tenant_alert_email_group_request.py
new file mode 100644
index 00000000..d4dec094
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_tenant_alert_email_group_request.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class UpdateTenantAlertEmailGroupRequest(BaseModel):
+ """
+ UpdateTenantAlertEmailGroupRequest
+ """ # noqa: E501
+
+ emails: List[StrictStr] = Field(description="A list of emails for users")
+ __properties: ClassVar[List[str]] = ["emails"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of UpdateTenantAlertEmailGroupRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of UpdateTenantAlertEmailGroupRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"emails": obj.get("emails")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_tenant_invite_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_tenant_invite_request.py
new file mode 100644
index 00000000..86c36a9b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_tenant_invite_request.py
@@ -0,0 +1,85 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.tenant_member_role import TenantMemberRole
+
+
+class UpdateTenantInviteRequest(BaseModel):
+ """
+ UpdateTenantInviteRequest
+ """ # noqa: E501
+
+ role: TenantMemberRole = Field(description="The role of the user in the tenant.")
+ __properties: ClassVar[List[str]] = ["role"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of UpdateTenantInviteRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of UpdateTenantInviteRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"role": obj.get("role")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_tenant_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_tenant_request.py
new file mode 100644
index 00000000..431efe00
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_tenant_request.py
@@ -0,0 +1,137 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
+from typing_extensions import Self
+
+
+class UpdateTenantRequest(BaseModel):
+ """
+ UpdateTenantRequest
+ """ # noqa: E501
+
+ name: Optional[StrictStr] = Field(
+ default=None, description="The name of the tenant."
+ )
+ analytics_opt_out: Optional[StrictBool] = Field(
+ default=None,
+ description="Whether the tenant has opted out of analytics.",
+ alias="analyticsOptOut",
+ )
+ alert_member_emails: Optional[StrictBool] = Field(
+ default=None,
+ description="Whether to alert tenant members.",
+ alias="alertMemberEmails",
+ )
+ enable_workflow_run_failure_alerts: Optional[StrictBool] = Field(
+ default=None,
+ description="Whether to send alerts when workflow runs fail.",
+ alias="enableWorkflowRunFailureAlerts",
+ )
+ enable_expiring_token_alerts: Optional[StrictBool] = Field(
+ default=None,
+ description="Whether to enable alerts when tokens are approaching expiration.",
+ alias="enableExpiringTokenAlerts",
+ )
+ enable_tenant_resource_limit_alerts: Optional[StrictBool] = Field(
+ default=None,
+ description="Whether to enable alerts when tenant resources are approaching limits.",
+ alias="enableTenantResourceLimitAlerts",
+ )
+ max_alerting_frequency: Optional[StrictStr] = Field(
+ default=None,
+ description="The max frequency at which to alert.",
+ alias="maxAlertingFrequency",
+ )
+ __properties: ClassVar[List[str]] = [
+ "name",
+ "analyticsOptOut",
+ "alertMemberEmails",
+ "enableWorkflowRunFailureAlerts",
+ "enableExpiringTokenAlerts",
+ "enableTenantResourceLimitAlerts",
+ "maxAlertingFrequency",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of UpdateTenantRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of UpdateTenantRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "name": obj.get("name"),
+ "analyticsOptOut": obj.get("analyticsOptOut"),
+ "alertMemberEmails": obj.get("alertMemberEmails"),
+ "enableWorkflowRunFailureAlerts": obj.get(
+ "enableWorkflowRunFailureAlerts"
+ ),
+ "enableExpiringTokenAlerts": obj.get("enableExpiringTokenAlerts"),
+ "enableTenantResourceLimitAlerts": obj.get(
+ "enableTenantResourceLimitAlerts"
+ ),
+ "maxAlertingFrequency": obj.get("maxAlertingFrequency"),
+ }
+ )
+ return _obj
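
All fields on UpdateTenantRequest are optional, and to_dict()/to_json() drop unset values, so only the settings actually touched are serialized -- the usual partial-update pattern. A sketch (not part of the diff), values illustrative:

from hatchet_sdk.clients.rest.models.update_tenant_request import UpdateTenantRequest

patch = UpdateTenantRequest(
    name="acme-prod",                   # illustrative tenant name
    enable_expiring_token_alerts=True,  # alias "enableExpiringTokenAlerts" works too
)
print(patch.to_json())  # -> {"name": "acme-prod", "enableExpiringTokenAlerts": true}
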
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_worker_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_worker_request.py
new file mode 100644
index 00000000..73904979
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/update_worker_request.py
@@ -0,0 +1,87 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictBool
+from typing_extensions import Self
+
+
+class UpdateWorkerRequest(BaseModel):
+ """
+ UpdateWorkerRequest
+ """ # noqa: E501
+
+ is_paused: Optional[StrictBool] = Field(
+ default=None,
+ description="Whether the worker is paused and cannot accept new runs.",
+ alias="isPaused",
+ )
+ __properties: ClassVar[List[str]] = ["isPaused"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of UpdateWorkerRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of UpdateWorkerRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"isPaused": obj.get("isPaused")})
+ return _obj
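
UpdateWorkerRequest carries a single optional flag; a sketch (not part of the diff) of building the body that pauses a worker:

from hatchet_sdk.clients.rest.models.update_worker_request import UpdateWorkerRequest

pause = UpdateWorkerRequest(is_paused=True)  # alias "isPaused" is also accepted
print(pause.to_json())                       # -> {"isPaused": true}
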
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user.py
new file mode 100644
index 00000000..a806062a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user.py
@@ -0,0 +1,126 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+
+
+class User(BaseModel):
+ """
+ User
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ name: Optional[StrictStr] = Field(
+ default=None, description="The display name of the user."
+ )
+ email: StrictStr = Field(description="The email address of the user.")
+ email_verified: StrictBool = Field(
+ description="Whether the user has verified their email address.",
+ alias="emailVerified",
+ )
+ has_password: Optional[StrictBool] = Field(
+ default=None,
+ description="Whether the user has a password set.",
+ alias="hasPassword",
+ )
+ email_hash: Optional[StrictStr] = Field(
+ default=None,
+ description="A hash of the user's email address for use with Pylon Support Chat",
+ alias="emailHash",
+ )
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "name",
+ "email",
+ "emailVerified",
+ "hasPassword",
+ "emailHash",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of User from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of User from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "name": obj.get("name"),
+ "email": obj.get("email"),
+ "emailVerified": obj.get("emailVerified"),
+ "hasPassword": obj.get("hasPassword"),
+ "emailHash": obj.get("emailHash"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_change_password_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_change_password_request.py
new file mode 100644
index 00000000..b0b87ac4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_change_password_request.py
@@ -0,0 +1,88 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class UserChangePasswordRequest(BaseModel):
+ """
+ UserChangePasswordRequest
+ """ # noqa: E501
+
+ password: StrictStr = Field(description="The password of the user.")
+ new_password: StrictStr = Field(
+ description="The new password for the user.", alias="newPassword"
+ )
+ __properties: ClassVar[List[str]] = ["password", "newPassword"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of UserChangePasswordRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of UserChangePasswordRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {"password": obj.get("password"), "newPassword": obj.get("newPassword")}
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_login_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_login_request.py
new file mode 100644
index 00000000..a9ab5a8d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_login_request.py
@@ -0,0 +1,86 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class UserLoginRequest(BaseModel):
+ """
+ UserLoginRequest
+ """ # noqa: E501
+
+ email: StrictStr = Field(description="The email address of the user.")
+ password: StrictStr = Field(description="The password of the user.")
+ __properties: ClassVar[List[str]] = ["email", "password"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of UserLoginRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of UserLoginRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {"email": obj.get("email"), "password": obj.get("password")}
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_register_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_register_request.py
new file mode 100644
index 00000000..bf0c1dfa
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_register_request.py
@@ -0,0 +1,91 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class UserRegisterRequest(BaseModel):
+ """
+ UserRegisterRequest
+ """ # noqa: E501
+
+ name: StrictStr = Field(description="The name of the user.")
+ email: StrictStr = Field(description="The email address of the user.")
+ password: StrictStr = Field(description="The password of the user.")
+ __properties: ClassVar[List[str]] = ["name", "email", "password"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of UserRegisterRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of UserRegisterRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "name": obj.get("name"),
+ "email": obj.get("email"),
+ "password": obj.get("password"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_tenant_memberships_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_tenant_memberships_list.py
new file mode 100644
index 00000000..98b8041b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_tenant_memberships_list.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.tenant_member import TenantMember
+
+
+class UserTenantMembershipsList(BaseModel):
+ """
+ UserTenantMembershipsList
+ """ # noqa: E501
+
+ pagination: Optional[PaginationResponse] = None
+ rows: Optional[List[TenantMember]] = None
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of UserTenantMembershipsList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of UserTenantMembershipsList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [TenantMember.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
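A short sketch of the list wrapper above (editorial, not generated code): both fields are optional, so an empty instance is valid and exclude_none=True drops the unset keys, while from_dict() tolerates a missing payload.

from hatchet_sdk.clients.rest.models.user_tenant_memberships_list import (
    UserTenantMembershipsList,
)

# Both fields default to None; exclude_none=True in to_dict() drops them entirely.
empty = UserTenantMembershipsList()
assert empty.to_json() == "{}"

# from_dict() short-circuits on None rather than raising.
assert UserTenantMembershipsList.from_dict(None) is None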
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_tenant_public.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_tenant_public.py
new file mode 100644
index 00000000..42e4fe0c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/user_tenant_public.py
@@ -0,0 +1,86 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class UserTenantPublic(BaseModel):
+ """
+ UserTenantPublic
+ """ # noqa: E501
+
+ email: StrictStr = Field(description="The email address of the user.")
+ name: Optional[StrictStr] = Field(
+ default=None, description="The display name of the user."
+ )
+ __properties: ClassVar[List[str]] = ["email", "name"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of UserTenantPublic from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of UserTenantPublic from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"email": obj.get("email"), "name": obj.get("name")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker.py
new file mode 100644
index 00000000..28b5731d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker.py
@@ -0,0 +1,100 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+
+
+class WebhookWorker(BaseModel):
+ """
+ WebhookWorker
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ name: StrictStr = Field(description="The name of the webhook worker.")
+ url: StrictStr = Field(description="The webhook url.")
+ __properties: ClassVar[List[str]] = ["metadata", "name", "url"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WebhookWorker from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WebhookWorker from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "name": obj.get("name"),
+ "url": obj.get("url"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_create_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_create_request.py
new file mode 100644
index 00000000..616277e1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_create_request.py
@@ -0,0 +1,94 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Annotated, Self
+
+
+class WebhookWorkerCreateRequest(BaseModel):
+ """
+ WebhookWorkerCreateRequest
+ """ # noqa: E501
+
+ name: StrictStr = Field(description="The name of the webhook worker.")
+ url: StrictStr = Field(description="The webhook url.")
+ secret: Optional[Annotated[str, Field(min_length=32, strict=True)]] = Field(
+ default=None,
+ description="The secret key for validation. If not provided, a random secret will be generated.",
+ )
+ __properties: ClassVar[List[str]] = ["name", "url", "secret"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WebhookWorkerCreateRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WebhookWorkerCreateRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "name": obj.get("name"),
+ "url": obj.get("url"),
+ "secret": obj.get("secret"),
+ }
+ )
+ return _obj
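A hedged sketch of the secret constraint above (names and URLs below are illustrative only): secret is optional, but when supplied it must be a string of at least 32 characters because of Annotated[str, Field(min_length=32, strict=True)].

from pydantic import ValidationError

from hatchet_sdk.clients.rest.models.webhook_worker_create_request import (
    WebhookWorkerCreateRequest,
)

# Omitting the secret is allowed; per the field description the server generates one.
ok = WebhookWorkerCreateRequest(name="orders-worker", url="https://example.invalid/hooks")
assert ok.secret is None

# A secret shorter than 32 characters fails validation.
try:
    WebhookWorkerCreateRequest(
        name="orders-worker", url="https://example.invalid/hooks", secret="too-short"
    )
except ValidationError as exc:
    print(exc)  # reports the min_length=32 violation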
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_create_response.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_create_response.py
new file mode 100644
index 00000000..819edec0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_create_response.py
@@ -0,0 +1,96 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.webhook_worker_created import WebhookWorkerCreated
+
+
+class WebhookWorkerCreateResponse(BaseModel):
+ """
+ WebhookWorkerCreateResponse
+ """ # noqa: E501
+
+ worker: Optional[WebhookWorkerCreated] = None
+ __properties: ClassVar[List[str]] = ["worker"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WebhookWorkerCreateResponse from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of worker
+ if self.worker:
+ _dict["worker"] = self.worker.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WebhookWorkerCreateResponse from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "worker": (
+ WebhookWorkerCreated.from_dict(obj["worker"])
+ if obj.get("worker") is not None
+ else None
+ )
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_created.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_created.py
new file mode 100644
index 00000000..26a409ee
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_created.py
@@ -0,0 +1,102 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+
+
+class WebhookWorkerCreated(BaseModel):
+ """
+ WebhookWorkerCreated
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ name: StrictStr = Field(description="The name of the webhook worker.")
+ url: StrictStr = Field(description="The webhook url.")
+ secret: StrictStr = Field(description="The secret key for validation.")
+ __properties: ClassVar[List[str]] = ["metadata", "name", "url", "secret"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WebhookWorkerCreated from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WebhookWorkerCreated from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "name": obj.get("name"),
+ "url": obj.get("url"),
+ "secret": obj.get("secret"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_list_response.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_list_response.py
new file mode 100644
index 00000000..a221e182
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_list_response.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.webhook_worker import WebhookWorker
+
+
+class WebhookWorkerListResponse(BaseModel):
+ """
+ WebhookWorkerListResponse
+ """ # noqa: E501
+
+ pagination: Optional[PaginationResponse] = None
+ rows: Optional[List[WebhookWorker]] = None
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WebhookWorkerListResponse from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WebhookWorkerListResponse from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [WebhookWorker.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_request.py
new file mode 100644
index 00000000..07adaa3d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_request.py
@@ -0,0 +1,102 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.webhook_worker_request_method import (
+ WebhookWorkerRequestMethod,
+)
+
+
+class WebhookWorkerRequest(BaseModel):
+ """
+ WebhookWorkerRequest
+ """ # noqa: E501
+
+ created_at: datetime = Field(
+ description="The date and time the request was created."
+ )
+ method: WebhookWorkerRequestMethod = Field(
+ description="The HTTP method used for the request."
+ )
+ status_code: StrictInt = Field(
+ description="The HTTP status code of the response.", alias="statusCode"
+ )
+ __properties: ClassVar[List[str]] = ["created_at", "method", "statusCode"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WebhookWorkerRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WebhookWorkerRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "created_at": obj.get("created_at"),
+ "method": obj.get("method"),
+ "statusCode": obj.get("statusCode"),
+ }
+ )
+ return _obj
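A cautionary sketch (editorial; values illustrative): to_dict() above is a python-mode model_dump(), so created_at stays a datetime object and the json.dumps call inside to_json() cannot encode it. The generator's own TODO comment points at model_dump_json() as the alternative for models with datetime fields.

from datetime import datetime, timezone

from hatchet_sdk.clients.rest.models.webhook_worker_request import WebhookWorkerRequest
from hatchet_sdk.clients.rest.models.webhook_worker_request_method import (
    WebhookWorkerRequestMethod,
)

req = WebhookWorkerRequest(
    created_at=datetime.now(timezone.utc),
    method=WebhookWorkerRequestMethod.POST,
    statusCode=200,
)

# Let pydantic encode the datetime (ISO 8601) instead of relying on json.dumps().
print(req.model_dump_json(by_alias=True))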
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_request_list_response.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_request_list_response.py
new file mode 100644
index 00000000..ec813a38
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_request_list_response.py
@@ -0,0 +1,102 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.webhook_worker_request import WebhookWorkerRequest
+
+
+class WebhookWorkerRequestListResponse(BaseModel):
+ """
+ WebhookWorkerRequestListResponse
+ """ # noqa: E501
+
+ requests: Optional[List[WebhookWorkerRequest]] = Field(
+ default=None, description="The list of webhook requests."
+ )
+ __properties: ClassVar[List[str]] = ["requests"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WebhookWorkerRequestListResponse from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of each item in requests (list)
+ _items = []
+ if self.requests:
+ for _item_requests in self.requests:
+ if _item_requests:
+ _items.append(_item_requests.to_dict())
+ _dict["requests"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WebhookWorkerRequestListResponse from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "requests": (
+ [WebhookWorkerRequest.from_dict(_item) for _item in obj["requests"]]
+ if obj.get("requests") is not None
+ else None
+ )
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_request_method.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_request_method.py
new file mode 100644
index 00000000..14cb059f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/webhook_worker_request_method.py
@@ -0,0 +1,38 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class WebhookWorkerRequestMethod(str, Enum):
+ """
+ WebhookWorkerRequestMethod
+ """
+
+ """
+ allowed enum values
+ """
+ GET = "GET"
+ POST = "POST"
+ PUT = "PUT"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of WebhookWorkerRequestMethod from a JSON string"""
+ return cls(json.loads(json_str))
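A tiny sketch of the enum helper above (editorial): from_json() expects a JSON-encoded string, quotes included, and returns the matching member; the str mixin makes members compare equal to their raw values.

from hatchet_sdk.clients.rest.models.webhook_worker_request_method import (
    WebhookWorkerRequestMethod,
)

method = WebhookWorkerRequestMethod.from_json('"POST"')
assert method is WebhookWorkerRequestMethod.POST
assert method == "POST"  # str mixin: members compare equal to their string values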
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker.py
new file mode 100644
index 00000000..03e9e4c4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker.py
@@ -0,0 +1,239 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, field_validator
+from typing_extensions import Annotated, Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.recent_step_runs import RecentStepRuns
+from hatchet_sdk.clients.rest.models.semaphore_slots import SemaphoreSlots
+from hatchet_sdk.clients.rest.models.worker_label import WorkerLabel
+from hatchet_sdk.clients.rest.models.worker_runtime_info import WorkerRuntimeInfo
+from hatchet_sdk.clients.rest.models.worker_type import WorkerType
+
+
+class Worker(BaseModel):
+ """
+ Worker
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ name: StrictStr = Field(description="The name of the worker.")
+ type: WorkerType
+ last_heartbeat_at: Optional[datetime] = Field(
+ default=None,
+ description="The time this worker last sent a heartbeat.",
+ alias="lastHeartbeatAt",
+ )
+ last_listener_established: Optional[datetime] = Field(
+ default=None,
+        description="The time this worker last established a listener connection.",
+ alias="lastListenerEstablished",
+ )
+ actions: Optional[List[StrictStr]] = Field(
+ default=None, description="The actions this worker can perform."
+ )
+ slots: Optional[List[SemaphoreSlots]] = Field(
+ default=None, description="The semaphore slot state for the worker."
+ )
+ recent_step_runs: Optional[List[RecentStepRuns]] = Field(
+ default=None,
+ description="The recent step runs for the worker.",
+ alias="recentStepRuns",
+ )
+ status: Optional[StrictStr] = Field(
+ default=None, description="The status of the worker."
+ )
+ max_runs: Optional[StrictInt] = Field(
+ default=None,
+ description="The maximum number of runs this worker can execute concurrently.",
+ alias="maxRuns",
+ )
+ available_runs: Optional[StrictInt] = Field(
+ default=None,
+        description="The number of run slots currently available on this worker.",
+ alias="availableRuns",
+ )
+ dispatcher_id: Optional[
+ Annotated[str, Field(min_length=36, strict=True, max_length=36)]
+ ] = Field(
+ default=None,
+        description="The ID of the assigned dispatcher, in UUID format.",
+ alias="dispatcherId",
+ )
+ labels: Optional[List[WorkerLabel]] = Field(
+ default=None, description="The current label state of the worker."
+ )
+ webhook_url: Optional[StrictStr] = Field(
+ default=None, description="The webhook URL for the worker.", alias="webhookUrl"
+ )
+ webhook_id: Optional[StrictStr] = Field(
+ default=None, description="The webhook ID for the worker.", alias="webhookId"
+ )
+ runtime_info: Optional[WorkerRuntimeInfo] = Field(default=None, alias="runtimeInfo")
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "name",
+ "type",
+ "lastHeartbeatAt",
+ "lastListenerEstablished",
+ "actions",
+ "slots",
+ "recentStepRuns",
+ "status",
+ "maxRuns",
+ "availableRuns",
+ "dispatcherId",
+ "labels",
+ "webhookUrl",
+ "webhookId",
+ "runtimeInfo",
+ ]
+
+ @field_validator("status")
+ def status_validate_enum(cls, value):
+ """Validates the enum"""
+ if value is None:
+ return value
+
+ if value not in set(["ACTIVE", "INACTIVE", "PAUSED"]):
+ raise ValueError(
+ "must be one of enum values ('ACTIVE', 'INACTIVE', 'PAUSED')"
+ )
+ return value
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of Worker from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in slots (list)
+ _items = []
+ if self.slots:
+ for _item_slots in self.slots:
+ if _item_slots:
+ _items.append(_item_slots.to_dict())
+ _dict["slots"] = _items
+ # override the default output from pydantic by calling `to_dict()` of each item in recent_step_runs (list)
+ _items = []
+ if self.recent_step_runs:
+ for _item_recent_step_runs in self.recent_step_runs:
+ if _item_recent_step_runs:
+ _items.append(_item_recent_step_runs.to_dict())
+ _dict["recentStepRuns"] = _items
+ # override the default output from pydantic by calling `to_dict()` of each item in labels (list)
+ _items = []
+ if self.labels:
+ for _item_labels in self.labels:
+ if _item_labels:
+ _items.append(_item_labels.to_dict())
+ _dict["labels"] = _items
+ # override the default output from pydantic by calling `to_dict()` of runtime_info
+ if self.runtime_info:
+ _dict["runtimeInfo"] = self.runtime_info.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of Worker from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "name": obj.get("name"),
+ "type": obj.get("type"),
+ "lastHeartbeatAt": obj.get("lastHeartbeatAt"),
+ "lastListenerEstablished": obj.get("lastListenerEstablished"),
+ "actions": obj.get("actions"),
+ "slots": (
+ [SemaphoreSlots.from_dict(_item) for _item in obj["slots"]]
+ if obj.get("slots") is not None
+ else None
+ ),
+ "recentStepRuns": (
+ [RecentStepRuns.from_dict(_item) for _item in obj["recentStepRuns"]]
+ if obj.get("recentStepRuns") is not None
+ else None
+ ),
+ "status": obj.get("status"),
+ "maxRuns": obj.get("maxRuns"),
+ "availableRuns": obj.get("availableRuns"),
+ "dispatcherId": obj.get("dispatcherId"),
+ "labels": (
+ [WorkerLabel.from_dict(_item) for _item in obj["labels"]]
+ if obj.get("labels") is not None
+ else None
+ ),
+ "webhookUrl": obj.get("webhookUrl"),
+ "webhookId": obj.get("webhookId"),
+ "runtimeInfo": (
+ WorkerRuntimeInfo.from_dict(obj["runtimeInfo"])
+ if obj.get("runtimeInfo") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
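A sketch of the status validator above (editorial; APIResourceMeta's fields are not shown in this diff, so a placeholder instance is built with model_construct() rather than real values). Because validate_assignment=True, status_validate_enum also runs on attribute assignment.

from pydantic import ValidationError

from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
from hatchet_sdk.clients.rest.models.worker import Worker
from hatchet_sdk.clients.rest.models.worker_type import WorkerType

# Placeholder metadata only; real code would parse APIResourceMeta from an API response.
meta = APIResourceMeta.model_construct()

worker = Worker(metadata=meta, name="py-worker", type=WorkerType.SELFHOSTED)
worker.status = "PAUSED"        # accepted: one of ACTIVE / INACTIVE / PAUSED
try:
    worker.status = "SLEEPING"  # rejected by status_validate_enum on assignment
except ValidationError as exc:
    print(exc)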
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_label.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_label.py
new file mode 100644
index 00000000..151febce
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_label.py
@@ -0,0 +1,102 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+
+
+class WorkerLabel(BaseModel):
+ """
+ WorkerLabel
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ key: StrictStr = Field(description="The key of the label.")
+ value: Optional[StrictStr] = Field(
+ default=None, description="The value of the label."
+ )
+ __properties: ClassVar[List[str]] = ["metadata", "key", "value"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkerLabel from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkerLabel from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "key": obj.get("key"),
+ "value": obj.get("value"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_list.py
new file mode 100644
index 00000000..bb02d792
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_list.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.worker import Worker
+
+
+class WorkerList(BaseModel):
+ """
+ WorkerList
+ """ # noqa: E501
+
+ pagination: Optional[PaginationResponse] = None
+ rows: Optional[List[Worker]] = None
+ __properties: ClassVar[List[str]] = ["pagination", "rows"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkerList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkerList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ "rows": (
+ [Worker.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_runtime_info.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_runtime_info.py
new file mode 100644
index 00000000..82db1f4a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_runtime_info.py
@@ -0,0 +1,103 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.worker_runtime_sdks import WorkerRuntimeSDKs
+
+
+class WorkerRuntimeInfo(BaseModel):
+ """
+ WorkerRuntimeInfo
+ """ # noqa: E501
+
+ sdk_version: Optional[StrictStr] = Field(default=None, alias="sdkVersion")
+ language: Optional[WorkerRuntimeSDKs] = None
+ language_version: Optional[StrictStr] = Field(default=None, alias="languageVersion")
+ os: Optional[StrictStr] = None
+ runtime_extra: Optional[StrictStr] = Field(default=None, alias="runtimeExtra")
+ __properties: ClassVar[List[str]] = [
+ "sdkVersion",
+ "language",
+ "languageVersion",
+ "os",
+ "runtimeExtra",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkerRuntimeInfo from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkerRuntimeInfo from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "sdkVersion": obj.get("sdkVersion"),
+ "language": obj.get("language"),
+ "languageVersion": obj.get("languageVersion"),
+ "os": obj.get("os"),
+ "runtimeExtra": obj.get("runtimeExtra"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_runtime_sdks.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_runtime_sdks.py
new file mode 100644
index 00000000..5716086e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_runtime_sdks.py
@@ -0,0 +1,38 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class WorkerRuntimeSDKs(str, Enum):
+ """
+ WorkerRuntimeSDKs
+ """
+
+ """
+ allowed enum values
+ """
+ GOLANG = "GOLANG"
+ PYTHON = "PYTHON"
+ TYPESCRIPT = "TYPESCRIPT"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of WorkerRuntimeSDKs from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_type.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_type.py
new file mode 100644
index 00000000..81269d2b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/worker_type.py
@@ -0,0 +1,38 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class WorkerType(str, Enum):
+ """
+ WorkerType
+ """
+
+ """
+ allowed enum values
+ """
+ SELFHOSTED = "SELFHOSTED"
+ MANAGED = "MANAGED"
+ WEBHOOK = "WEBHOOK"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of WorkerType from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow.py
new file mode 100644
index 00000000..f3107144
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow.py
@@ -0,0 +1,165 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.job import Job
+from hatchet_sdk.clients.rest.models.workflow_tag import WorkflowTag
+
+
+class Workflow(BaseModel):
+ """
+ Workflow
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ name: StrictStr = Field(description="The name of the workflow.")
+ description: Optional[StrictStr] = Field(
+ default=None, description="The description of the workflow."
+ )
+ is_paused: Optional[StrictBool] = Field(
+ default=None, description="Whether the workflow is paused.", alias="isPaused"
+ )
+ versions: Optional[List[WorkflowVersionMeta]] = None
+ tags: Optional[List[WorkflowTag]] = Field(
+ default=None, description="The tags of the workflow."
+ )
+ jobs: Optional[List[Job]] = Field(
+ default=None, description="The jobs of the workflow."
+ )
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "name",
+ "description",
+ "isPaused",
+ "versions",
+ "tags",
+ "jobs",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of Workflow from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in versions (list)
+ _items = []
+ if self.versions:
+ for _item_versions in self.versions:
+ if _item_versions:
+ _items.append(_item_versions.to_dict())
+ _dict["versions"] = _items
+ # override the default output from pydantic by calling `to_dict()` of each item in tags (list)
+ _items = []
+ if self.tags:
+ for _item_tags in self.tags:
+ if _item_tags:
+ _items.append(_item_tags.to_dict())
+ _dict["tags"] = _items
+ # override the default output from pydantic by calling `to_dict()` of each item in jobs (list)
+ _items = []
+ if self.jobs:
+ for _item_jobs in self.jobs:
+ if _item_jobs:
+ _items.append(_item_jobs.to_dict())
+ _dict["jobs"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of Workflow from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "name": obj.get("name"),
+ "description": obj.get("description"),
+ "isPaused": obj.get("isPaused"),
+ "versions": (
+ [WorkflowVersionMeta.from_dict(_item) for _item in obj["versions"]]
+ if obj.get("versions") is not None
+ else None
+ ),
+ "tags": (
+ [WorkflowTag.from_dict(_item) for _item in obj["tags"]]
+ if obj.get("tags") is not None
+ else None
+ ),
+ "jobs": (
+ [Job.from_dict(_item) for _item in obj["jobs"]]
+ if obj.get("jobs") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
+
+
+from hatchet_sdk.clients.rest.models.workflow_version_meta import WorkflowVersionMeta
+
+# TODO: Rewrite to not use raise_errors
+Workflow.model_rebuild(raise_errors=False)
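
A quick usage sketch (illustrative, not part of the generated module): the `from_json`/`to_dict` helpers defined above round-trip the API's camelCase payloads into snake_case attributes. The payload below is made up, and it assumes `APIResourceMeta` requires the usual `id`/`createdAt`/`updatedAt` fields.

from hatchet_sdk.clients.rest.models.workflow import Workflow

# Illustrative payload; APIResourceMeta is assumed to carry id/createdAt/updatedAt.
payload = """
{
  "metadata": {
    "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
    "createdAt": "2024-01-01T00:00:00Z",
    "updatedAt": "2024-01-01T00:00:00Z"
  },
  "name": "example-workflow",
  "isPaused": false
}
"""

workflow = Workflow.from_json(payload)
print(workflow.name, workflow.is_paused)   # snake_case attribute access
print(workflow.to_dict())                  # serialized back with camelCase aliases
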
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_concurrency.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_concurrency.py
new file mode 100644
index 00000000..ce9d067e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_concurrency.py
@@ -0,0 +1,107 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.concurrency_limit_strategy import (
+ ConcurrencyLimitStrategy,
+)
+
+
+class WorkflowConcurrency(BaseModel):
+ """
+ WorkflowConcurrency
+ """ # noqa: E501
+
+ max_runs: StrictInt = Field(
+ description="The maximum number of concurrent workflow runs.", alias="maxRuns"
+ )
+ limit_strategy: ConcurrencyLimitStrategy = Field(
+ description="The strategy to use when the concurrency limit is reached.",
+ alias="limitStrategy",
+ )
+ get_concurrency_group: StrictStr = Field(
+ description="An action which gets the concurrency group for the WorkflowRun.",
+ alias="getConcurrencyGroup",
+ )
+ __properties: ClassVar[List[str]] = [
+ "maxRuns",
+ "limitStrategy",
+ "getConcurrencyGroup",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowConcurrency from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowConcurrency from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "maxRuns": obj.get("maxRuns"),
+ "limitStrategy": obj.get("limitStrategy"),
+ "getConcurrencyGroup": obj.get("getConcurrencyGroup"),
+ }
+ )
+ return _obj
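
A short sketch of how the aliases behave (illustrative, not part of the generated file): `populate_by_name=True` lets callers construct the model with snake_case field names, while `to_dict()` emits the camelCase wire format.

from hatchet_sdk.clients.rest.models.concurrency_limit_strategy import (
    ConcurrencyLimitStrategy,
)
from hatchet_sdk.clients.rest.models.workflow_concurrency import WorkflowConcurrency

concurrency = WorkflowConcurrency(
    max_runs=5,                                        # snake_case accepted via populate_by_name
    limit_strategy=list(ConcurrencyLimitStrategy)[0],  # any member of the generated enum
    get_concurrency_group="get_group_key",
)

# Keys come out as maxRuns / limitStrategy / getConcurrencyGroup.
print(concurrency.to_dict())
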
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_deployment_config.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_deployment_config.py
new file mode 100644
index 00000000..6ef1f60a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_deployment_config.py
@@ -0,0 +1,136 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.github_app_installation import (
+ GithubAppInstallation,
+)
+
+
+class WorkflowDeploymentConfig(BaseModel):
+ """
+ WorkflowDeploymentConfig
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ git_repo_name: StrictStr = Field(
+ description="The repository name.", alias="gitRepoName"
+ )
+ git_repo_owner: StrictStr = Field(
+ description="The repository owner.", alias="gitRepoOwner"
+ )
+ git_repo_branch: StrictStr = Field(
+ description="The repository branch.", alias="gitRepoBranch"
+ )
+ github_app_installation: Optional[GithubAppInstallation] = Field(
+ default=None,
+ description="The Github App installation.",
+ alias="githubAppInstallation",
+ )
+ github_app_installation_id: StrictStr = Field(
+ description="The id of the Github App installation.",
+ alias="githubAppInstallationId",
+ )
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "gitRepoName",
+ "gitRepoOwner",
+ "gitRepoBranch",
+ "githubAppInstallation",
+ "githubAppInstallationId",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowDeploymentConfig from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of github_app_installation
+ if self.github_app_installation:
+ _dict["githubAppInstallation"] = self.github_app_installation.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowDeploymentConfig from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "gitRepoName": obj.get("gitRepoName"),
+ "gitRepoOwner": obj.get("gitRepoOwner"),
+ "gitRepoBranch": obj.get("gitRepoBranch"),
+ "githubAppInstallation": (
+ GithubAppInstallation.from_dict(obj["githubAppInstallation"])
+ if obj.get("githubAppInstallation") is not None
+ else None
+ ),
+ "githubAppInstallationId": obj.get("githubAppInstallationId"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_kind.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_kind.py
new file mode 100644
index 00000000..e258a048
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_kind.py
@@ -0,0 +1,38 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class WorkflowKind(str, Enum):
+ """
+ WorkflowKind
+ """
+
+ """
+ allowed enum values
+ """
+ FUNCTION = "FUNCTION"
+ DURABLE = "DURABLE"
+ DAG = "DAG"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of WorkflowKind from a JSON string"""
+ return cls(json.loads(json_str))
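
For the enum models, `from_json` expects a JSON-encoded string (quotes included), not a bare value — a quick sketch:

import json

from hatchet_sdk.clients.rest.models.workflow_kind import WorkflowKind

kind = WorkflowKind.from_json('"DAG"')   # json.loads('"DAG"') -> "DAG" -> WorkflowKind.DAG
assert kind is WorkflowKind.DAG
assert kind.value == "DAG"

# str-based enums serialize straight back to their wire value.
assert json.loads(json.dumps(kind)) == "DAG"
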
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_list.py
new file mode 100644
index 00000000..9eb14aee
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_list.py
@@ -0,0 +1,120 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.workflow import Workflow
+
+
+class WorkflowList(BaseModel):
+ """
+ WorkflowList
+ """ # noqa: E501
+
+ metadata: Optional[APIResourceMeta] = None
+ rows: Optional[List[Workflow]] = None
+ pagination: Optional[PaginationResponse] = None
+ __properties: ClassVar[List[str]] = ["metadata", "rows", "pagination"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "rows": (
+ [Workflow.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_metrics.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_metrics.py
new file mode 100644
index 00000000..8158e732
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_metrics.py
@@ -0,0 +1,97 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt
+from typing_extensions import Self
+
+
+class WorkflowMetrics(BaseModel):
+ """
+ WorkflowMetrics
+ """ # noqa: E501
+
+ group_key_runs_count: Optional[StrictInt] = Field(
+ default=None,
+ description="The number of runs for a specific group key (passed via filter)",
+ alias="groupKeyRunsCount",
+ )
+ group_key_count: Optional[StrictInt] = Field(
+ default=None,
+ description="The total number of concurrency group keys.",
+ alias="groupKeyCount",
+ )
+ __properties: ClassVar[List[str]] = ["groupKeyRunsCount", "groupKeyCount"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowMetrics from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowMetrics from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "groupKeyRunsCount": obj.get("groupKeyRunsCount"),
+ "groupKeyCount": obj.get("groupKeyCount"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run.py
new file mode 100644
index 00000000..903da30f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run.py
@@ -0,0 +1,188 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
+from typing_extensions import Annotated, Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.workflow_run_status import WorkflowRunStatus
+from hatchet_sdk.clients.rest.models.workflow_run_triggered_by import (
+ WorkflowRunTriggeredBy,
+)
+from hatchet_sdk.clients.rest.models.workflow_version import WorkflowVersion
+
+
+class WorkflowRun(BaseModel):
+ """
+ WorkflowRun
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ tenant_id: StrictStr = Field(alias="tenantId")
+ workflow_version_id: StrictStr = Field(alias="workflowVersionId")
+ workflow_version: Optional[WorkflowVersion] = Field(
+ default=None, alias="workflowVersion"
+ )
+ status: WorkflowRunStatus
+ display_name: Optional[StrictStr] = Field(default=None, alias="displayName")
+ job_runs: Optional[List[JobRun]] = Field(default=None, alias="jobRuns")
+ triggered_by: WorkflowRunTriggeredBy = Field(alias="triggeredBy")
+ input: Optional[Dict[str, Any]] = None
+ error: Optional[StrictStr] = None
+ started_at: Optional[datetime] = Field(default=None, alias="startedAt")
+ finished_at: Optional[datetime] = Field(default=None, alias="finishedAt")
+ duration: Optional[StrictInt] = None
+ parent_id: Optional[
+ Annotated[str, Field(min_length=36, strict=True, max_length=36)]
+ ] = Field(default=None, alias="parentId")
+ parent_step_run_id: Optional[
+ Annotated[str, Field(min_length=36, strict=True, max_length=36)]
+ ] = Field(default=None, alias="parentStepRunId")
+ additional_metadata: Optional[Dict[str, Any]] = Field(
+ default=None, alias="additionalMetadata"
+ )
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "tenantId",
+ "workflowVersionId",
+ "workflowVersion",
+ "status",
+ "displayName",
+ "jobRuns",
+ "triggeredBy",
+ "input",
+ "error",
+ "startedAt",
+ "finishedAt",
+ "duration",
+ "parentId",
+ "parentStepRunId",
+ "additionalMetadata",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowRun from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of workflow_version
+ if self.workflow_version:
+ _dict["workflowVersion"] = self.workflow_version.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in job_runs (list)
+ _items = []
+ if self.job_runs:
+ for _item_job_runs in self.job_runs:
+ if _item_job_runs:
+ _items.append(_item_job_runs.to_dict())
+ _dict["jobRuns"] = _items
+ # override the default output from pydantic by calling `to_dict()` of triggered_by
+ if self.triggered_by:
+ _dict["triggeredBy"] = self.triggered_by.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowRun from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "tenantId": obj.get("tenantId"),
+ "workflowVersionId": obj.get("workflowVersionId"),
+ "workflowVersion": (
+ WorkflowVersion.from_dict(obj["workflowVersion"])
+ if obj.get("workflowVersion") is not None
+ else None
+ ),
+ "status": obj.get("status"),
+ "displayName": obj.get("displayName"),
+ "jobRuns": (
+ [JobRun.from_dict(_item) for _item in obj["jobRuns"]]
+ if obj.get("jobRuns") is not None
+ else None
+ ),
+ "triggeredBy": (
+ WorkflowRunTriggeredBy.from_dict(obj["triggeredBy"])
+ if obj.get("triggeredBy") is not None
+ else None
+ ),
+ "input": obj.get("input"),
+ "error": obj.get("error"),
+ "startedAt": obj.get("startedAt"),
+ "finishedAt": obj.get("finishedAt"),
+ "duration": obj.get("duration"),
+ "parentId": obj.get("parentId"),
+ "parentStepRunId": obj.get("parentStepRunId"),
+ "additionalMetadata": obj.get("additionalMetadata"),
+ }
+ )
+ return _obj
+
+
+from hatchet_sdk.clients.rest.models.job_run import JobRun
+
+# TODO: Rewrite to not use raise_errors
+WorkflowRun.model_rebuild(raise_errors=False)
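
The late `from ... import JobRun` plus `model_rebuild(raise_errors=False)` above is the generator's way of breaking the circular dependency between `workflow_run.py` and `job_run.py`. A minimal, self-contained sketch of the same pydantic pattern (the model names below are illustrative, not from the SDK):

from __future__ import annotations

from typing import List, Optional

from pydantic import BaseModel


class Run(BaseModel):
    name: str
    # Forward reference: Step is not defined yet when Run is created.
    steps: Optional[List[Step]] = None


class Step(BaseModel):
    run_name: str


# Resolve the deferred Step reference once both models exist.
Run.model_rebuild()

run = Run(name="example", steps=[Step(run_name="example")])
print(run.model_dump())
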
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_cancel200_response.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_cancel200_response.py
new file mode 100644
index 00000000..fbf545be
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_cancel200_response.py
@@ -0,0 +1,85 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Annotated, Self
+
+
+class WorkflowRunCancel200Response(BaseModel):
+ """
+ WorkflowRunCancel200Response
+ """ # noqa: E501
+
+ workflow_run_ids: Optional[
+ List[Annotated[str, Field(min_length=36, strict=True, max_length=36)]]
+ ] = Field(default=None, alias="workflowRunIds")
+ __properties: ClassVar[List[str]] = ["workflowRunIds"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowRunCancel200Response from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowRunCancel200Response from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"workflowRunIds": obj.get("workflowRunIds")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_list.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_list.py
new file mode 100644
index 00000000..a56d3feb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_list.py
@@ -0,0 +1,110 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse
+from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun
+
+
+class WorkflowRunList(BaseModel):
+ """
+ WorkflowRunList
+ """ # noqa: E501
+
+ rows: Optional[List[WorkflowRun]] = None
+ pagination: Optional[PaginationResponse] = None
+ __properties: ClassVar[List[str]] = ["rows", "pagination"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowRunList from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of each item in rows (list)
+ _items = []
+ if self.rows:
+ for _item_rows in self.rows:
+ if _item_rows:
+ _items.append(_item_rows.to_dict())
+ _dict["rows"] = _items
+ # override the default output from pydantic by calling `to_dict()` of pagination
+ if self.pagination:
+ _dict["pagination"] = self.pagination.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowRunList from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "rows": (
+ [WorkflowRun.from_dict(_item) for _item in obj["rows"]]
+ if obj.get("rows") is not None
+ else None
+ ),
+ "pagination": (
+ PaginationResponse.from_dict(obj["pagination"])
+ if obj.get("pagination") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_order_by_direction.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_order_by_direction.py
new file mode 100644
index 00000000..4b499699
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_order_by_direction.py
@@ -0,0 +1,37 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class WorkflowRunOrderByDirection(str, Enum):
+ """
+ WorkflowRunOrderByDirection
+ """
+
+ """
+ allowed enum values
+ """
+ ASC = "ASC"
+ DESC = "DESC"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of WorkflowRunOrderByDirection from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_order_by_field.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_order_by_field.py
new file mode 100644
index 00000000..bad42454
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_order_by_field.py
@@ -0,0 +1,39 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class WorkflowRunOrderByField(str, Enum):
+ """
+ WorkflowRunOrderByField
+ """
+
+ """
+ allowed enum values
+ """
+ CREATEDAT = "createdAt"
+ STARTEDAT = "startedAt"
+ FINISHEDAT = "finishedAt"
+ DURATION = "duration"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of WorkflowRunOrderByField from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_shape.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_shape.py
new file mode 100644
index 00000000..426e7ef9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_shape.py
@@ -0,0 +1,186 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from datetime import datetime
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
+from typing_extensions import Annotated, Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.job_run import JobRun
+from hatchet_sdk.clients.rest.models.workflow_run_status import WorkflowRunStatus
+from hatchet_sdk.clients.rest.models.workflow_run_triggered_by import (
+ WorkflowRunTriggeredBy,
+)
+from hatchet_sdk.clients.rest.models.workflow_version import WorkflowVersion
+
+
+class WorkflowRunShape(BaseModel):
+ """
+ WorkflowRunShape
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ tenant_id: StrictStr = Field(alias="tenantId")
+ workflow_id: Optional[StrictStr] = Field(default=None, alias="workflowId")
+ workflow_version_id: StrictStr = Field(alias="workflowVersionId")
+ workflow_version: Optional[WorkflowVersion] = Field(
+ default=None, alias="workflowVersion"
+ )
+ status: WorkflowRunStatus
+ display_name: Optional[StrictStr] = Field(default=None, alias="displayName")
+ job_runs: Optional[List[JobRun]] = Field(default=None, alias="jobRuns")
+ triggered_by: WorkflowRunTriggeredBy = Field(alias="triggeredBy")
+ input: Optional[Dict[str, Any]] = None
+ error: Optional[StrictStr] = None
+ started_at: Optional[datetime] = Field(default=None, alias="startedAt")
+ finished_at: Optional[datetime] = Field(default=None, alias="finishedAt")
+ duration: Optional[StrictInt] = None
+ parent_id: Optional[
+ Annotated[str, Field(min_length=36, strict=True, max_length=36)]
+ ] = Field(default=None, alias="parentId")
+ parent_step_run_id: Optional[
+ Annotated[str, Field(min_length=36, strict=True, max_length=36)]
+ ] = Field(default=None, alias="parentStepRunId")
+ additional_metadata: Optional[Dict[str, Any]] = Field(
+ default=None, alias="additionalMetadata"
+ )
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "tenantId",
+ "workflowId",
+ "workflowVersionId",
+ "workflowVersion",
+ "status",
+ "displayName",
+ "jobRuns",
+ "triggeredBy",
+ "input",
+ "error",
+ "startedAt",
+ "finishedAt",
+ "duration",
+ "parentId",
+ "parentStepRunId",
+ "additionalMetadata",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowRunShape from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of workflow_version
+ if self.workflow_version:
+ _dict["workflowVersion"] = self.workflow_version.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in job_runs (list)
+ _items = []
+ if self.job_runs:
+ for _item_job_runs in self.job_runs:
+ if _item_job_runs:
+ _items.append(_item_job_runs.to_dict())
+ _dict["jobRuns"] = _items
+ # override the default output from pydantic by calling `to_dict()` of triggered_by
+ if self.triggered_by:
+ _dict["triggeredBy"] = self.triggered_by.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowRunShape from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "tenantId": obj.get("tenantId"),
+ "workflowId": obj.get("workflowId"),
+ "workflowVersionId": obj.get("workflowVersionId"),
+ "workflowVersion": (
+ WorkflowVersion.from_dict(obj["workflowVersion"])
+ if obj.get("workflowVersion") is not None
+ else None
+ ),
+ "status": obj.get("status"),
+ "displayName": obj.get("displayName"),
+ "jobRuns": (
+ [JobRun.from_dict(_item) for _item in obj["jobRuns"]]
+ if obj.get("jobRuns") is not None
+ else None
+ ),
+ "triggeredBy": (
+ WorkflowRunTriggeredBy.from_dict(obj["triggeredBy"])
+ if obj.get("triggeredBy") is not None
+ else None
+ ),
+ "input": obj.get("input"),
+ "error": obj.get("error"),
+ "startedAt": obj.get("startedAt"),
+ "finishedAt": obj.get("finishedAt"),
+ "duration": obj.get("duration"),
+ "parentId": obj.get("parentId"),
+ "parentStepRunId": obj.get("parentStepRunId"),
+ "additionalMetadata": obj.get("additionalMetadata"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_status.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_status.py
new file mode 100644
index 00000000..06bdee87
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_status.py
@@ -0,0 +1,42 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+from enum import Enum
+
+from typing_extensions import Self
+
+
+class WorkflowRunStatus(str, Enum):
+ """
+ WorkflowRunStatus
+ """
+
+ """
+ allowed enum values
+ """
+ PENDING = "PENDING"
+ RUNNING = "RUNNING"
+ SUCCEEDED = "SUCCEEDED"
+ FAILED = "FAILED"
+ CANCELLED = "CANCELLED"
+ QUEUED = "QUEUED"
+ BACKOFF = "BACKOFF"
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of WorkflowRunStatus from a JSON string"""
+ return cls(json.loads(json_str))
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_triggered_by.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_triggered_by.py
new file mode 100644
index 00000000..8cbe1c11
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_run_triggered_by.py
@@ -0,0 +1,112 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+
+
+class WorkflowRunTriggeredBy(BaseModel):
+ """
+ WorkflowRunTriggeredBy
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ parent_workflow_run_id: Optional[StrictStr] = Field(
+ default=None, alias="parentWorkflowRunId"
+ )
+ event_id: Optional[StrictStr] = Field(default=None, alias="eventId")
+ cron_parent_id: Optional[StrictStr] = Field(default=None, alias="cronParentId")
+ cron_schedule: Optional[StrictStr] = Field(default=None, alias="cronSchedule")
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "parentWorkflowRunId",
+ "eventId",
+ "cronParentId",
+ "cronSchedule",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowRunTriggeredBy from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowRunTriggeredBy from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "parentWorkflowRunId": obj.get("parentWorkflowRunId"),
+ "eventId": obj.get("eventId"),
+ "cronParentId": obj.get("cronParentId"),
+ "cronSchedule": obj.get("cronSchedule"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_runs_cancel_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_runs_cancel_request.py
new file mode 100644
index 00000000..d5557cda
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_runs_cancel_request.py
@@ -0,0 +1,85 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Annotated, Self
+
+
+class WorkflowRunsCancelRequest(BaseModel):
+ """
+ WorkflowRunsCancelRequest
+ """ # noqa: E501
+
+ workflow_run_ids: List[
+ Annotated[str, Field(min_length=36, strict=True, max_length=36)]
+ ] = Field(alias="workflowRunIds")
+ __properties: ClassVar[List[str]] = ["workflowRunIds"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowRunsCancelRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowRunsCancelRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"workflowRunIds": obj.get("workflowRunIds")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_runs_metrics.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_runs_metrics.py
new file mode 100644
index 00000000..71b6351b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_runs_metrics.py
@@ -0,0 +1,98 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.workflow_runs_metrics_counts import (
+ WorkflowRunsMetricsCounts,
+)
+
+
+class WorkflowRunsMetrics(BaseModel):
+ """
+ WorkflowRunsMetrics
+ """ # noqa: E501
+
+ counts: Optional[WorkflowRunsMetricsCounts] = None
+ __properties: ClassVar[List[str]] = ["counts"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowRunsMetrics from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of counts
+ if self.counts:
+ _dict["counts"] = self.counts.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowRunsMetrics from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "counts": (
+ WorkflowRunsMetricsCounts.from_dict(obj["counts"])
+ if obj.get("counts") is not None
+ else None
+ )
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_runs_metrics_counts.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_runs_metrics_counts.py
new file mode 100644
index 00000000..e8c92256
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_runs_metrics_counts.py
@@ -0,0 +1,104 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt
+from typing_extensions import Self
+
+
+class WorkflowRunsMetricsCounts(BaseModel):
+ """
+ WorkflowRunsMetricsCounts
+ """ # noqa: E501
+
+ pending: Optional[StrictInt] = Field(default=None, alias="PENDING")
+ running: Optional[StrictInt] = Field(default=None, alias="RUNNING")
+ succeeded: Optional[StrictInt] = Field(default=None, alias="SUCCEEDED")
+ failed: Optional[StrictInt] = Field(default=None, alias="FAILED")
+ queued: Optional[StrictInt] = Field(default=None, alias="QUEUED")
+ cancelled: Optional[StrictInt] = Field(default=None, alias="CANCELLED")
+ __properties: ClassVar[List[str]] = [
+ "PENDING",
+ "RUNNING",
+ "SUCCEEDED",
+ "FAILED",
+ "QUEUED",
+ "CANCELLED",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowRunsMetricsCounts from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowRunsMetricsCounts from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "PENDING": obj.get("PENDING"),
+ "RUNNING": obj.get("RUNNING"),
+ "SUCCEEDED": obj.get("SUCCEEDED"),
+ "FAILED": obj.get("FAILED"),
+ "QUEUED": obj.get("QUEUED"),
+ "CANCELLED": obj.get("CANCELLED"),
+ }
+ )
+ return _obj
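
The upper-case aliases above map the API's status-keyed counters onto snake_case attributes; a small sketch with made-up numbers:

from hatchet_sdk.clients.rest.models.workflow_runs_metrics_counts import (
    WorkflowRunsMetricsCounts,
)

counts = WorkflowRunsMetricsCounts.from_dict(
    {"PENDING": 2, "RUNNING": 1, "SUCCEEDED": 10, "FAILED": 0}
)
print(counts.pending, counts.running, counts.succeeded, counts.failed)  # 2 1 10 0

# to_dict() re-emits the upper-case keys and, via exclude_none, drops the
# statuses that were never provided (QUEUED, CANCELLED here).
print(counts.to_dict())
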
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_tag.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_tag.py
new file mode 100644
index 00000000..fcd3423a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_tag.py
@@ -0,0 +1,84 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class WorkflowTag(BaseModel):
+ """
+ WorkflowTag
+ """ # noqa: E501
+
+ name: StrictStr = Field(description="The name of the workflow.")
+ color: StrictStr = Field(description="The color of the workflow.")
+ __properties: ClassVar[List[str]] = ["name", "color"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowTag from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowTag from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"name": obj.get("name"), "color": obj.get("color")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_trigger_cron_ref.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_trigger_cron_ref.py
new file mode 100644
index 00000000..1750e659
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_trigger_cron_ref.py
@@ -0,0 +1,86 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, StrictStr
+from typing_extensions import Self
+
+
+class WorkflowTriggerCronRef(BaseModel):
+ """
+ WorkflowTriggerCronRef
+ """ # noqa: E501
+
+ parent_id: Optional[StrictStr] = None
+ cron: Optional[StrictStr] = None
+ __properties: ClassVar[List[str]] = ["parent_id", "cron"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowTriggerCronRef from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowTriggerCronRef from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {"parent_id": obj.get("parent_id"), "cron": obj.get("cron")}
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_trigger_event_ref.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_trigger_event_ref.py
new file mode 100644
index 00000000..cfabbe02
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_trigger_event_ref.py
@@ -0,0 +1,86 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, StrictStr
+from typing_extensions import Self
+
+
+class WorkflowTriggerEventRef(BaseModel):
+ """
+ WorkflowTriggerEventRef
+ """ # noqa: E501
+
+ parent_id: Optional[StrictStr] = None
+ event_key: Optional[StrictStr] = None
+ __properties: ClassVar[List[str]] = ["parent_id", "event_key"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowTriggerEventRef from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowTriggerEventRef from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {"parent_id": obj.get("parent_id"), "event_key": obj.get("event_key")}
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_triggers.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_triggers.py
new file mode 100644
index 00000000..fd2f07ef
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_triggers.py
@@ -0,0 +1,141 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.workflow_trigger_cron_ref import (
+ WorkflowTriggerCronRef,
+)
+from hatchet_sdk.clients.rest.models.workflow_trigger_event_ref import (
+ WorkflowTriggerEventRef,
+)
+
+
+class WorkflowTriggers(BaseModel):
+ """
+ WorkflowTriggers
+ """ # noqa: E501
+
+ metadata: Optional[APIResourceMeta] = None
+ workflow_version_id: Optional[StrictStr] = None
+ tenant_id: Optional[StrictStr] = None
+ events: Optional[List[WorkflowTriggerEventRef]] = None
+ crons: Optional[List[WorkflowTriggerCronRef]] = None
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "workflow_version_id",
+ "tenant_id",
+ "events",
+ "crons",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowTriggers from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in events (list)
+ _items = []
+ if self.events:
+ for _item_events in self.events:
+ if _item_events:
+ _items.append(_item_events.to_dict())
+ _dict["events"] = _items
+ # override the default output from pydantic by calling `to_dict()` of each item in crons (list)
+ _items = []
+ if self.crons:
+ for _item_crons in self.crons:
+ if _item_crons:
+ _items.append(_item_crons.to_dict())
+ _dict["crons"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowTriggers from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "workflow_version_id": obj.get("workflow_version_id"),
+ "tenant_id": obj.get("tenant_id"),
+ "events": (
+ [
+ WorkflowTriggerEventRef.from_dict(_item)
+ for _item in obj["events"]
+ ]
+ if obj.get("events") is not None
+ else None
+ ),
+ "crons": (
+ [WorkflowTriggerCronRef.from_dict(_item) for _item in obj["crons"]]
+ if obj.get("crons") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
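+
+
+# Usage sketch (illustrative only; field values are placeholders): nested
+# trigger refs are serialized through their own to_dict(), so a round trip
+# through from_dict() rebuilds typed children.
+#
+#     triggers = WorkflowTriggers.from_dict(
+#         {"tenant_id": "t-1", "crons": [{"cron": "*/5 * * * *"}]}
+#     )
+#     assert isinstance(triggers.crons[0], WorkflowTriggerCronRef)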
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_update_request.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_update_request.py
new file mode 100644
index 00000000..5ec56835
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_update_request.py
@@ -0,0 +1,85 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictBool
+from typing_extensions import Self
+
+
+class WorkflowUpdateRequest(BaseModel):
+ """
+ WorkflowUpdateRequest
+ """ # noqa: E501
+
+ is_paused: Optional[StrictBool] = Field(
+ default=None, description="Whether the workflow is paused.", alias="isPaused"
+ )
+ __properties: ClassVar[List[str]] = ["isPaused"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowUpdateRequest from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowUpdateRequest from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"isPaused": obj.get("isPaused")})
+ return _obj
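+
+
+# Usage sketch (illustrative only): the field accepts either its Python name
+# or its JSON alias (populate_by_name=True), and to_dict() always emits the
+# alias.
+#
+#     req = WorkflowUpdateRequest(is_paused=True)
+#     assert req.to_dict() == {"isPaused": True}
+#     assert WorkflowUpdateRequest.from_dict({"isPaused": False}).is_paused is False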
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version.py
new file mode 100644
index 00000000..47554e56
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version.py
@@ -0,0 +1,170 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+from hatchet_sdk.clients.rest.models.job import Job
+from hatchet_sdk.clients.rest.models.workflow import Workflow
+from hatchet_sdk.clients.rest.models.workflow_concurrency import WorkflowConcurrency
+from hatchet_sdk.clients.rest.models.workflow_triggers import WorkflowTriggers
+
+
+class WorkflowVersion(BaseModel):
+ """
+ WorkflowVersion
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ version: StrictStr = Field(description="The version of the workflow.")
+ order: StrictInt
+ workflow_id: StrictStr = Field(alias="workflowId")
+ sticky: Optional[StrictStr] = Field(
+ default=None, description="The sticky strategy of the workflow."
+ )
+ default_priority: Optional[StrictInt] = Field(
+ default=None,
+ description="The default priority of the workflow.",
+ alias="defaultPriority",
+ )
+ workflow: Optional[Workflow] = None
+ concurrency: Optional[WorkflowConcurrency] = None
+ triggers: Optional[WorkflowTriggers] = None
+ schedule_timeout: Optional[StrictStr] = Field(default=None, alias="scheduleTimeout")
+ jobs: Optional[List[Job]] = None
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "version",
+ "order",
+ "workflowId",
+ "sticky",
+ "defaultPriority",
+ "workflow",
+ "concurrency",
+ "triggers",
+ "scheduleTimeout",
+ "jobs",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowVersion from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of workflow
+ if self.workflow:
+ _dict["workflow"] = self.workflow.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of concurrency
+ if self.concurrency:
+ _dict["concurrency"] = self.concurrency.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of triggers
+ if self.triggers:
+ _dict["triggers"] = self.triggers.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in jobs (list)
+ _items = []
+ if self.jobs:
+ for _item_jobs in self.jobs:
+ if _item_jobs:
+ _items.append(_item_jobs.to_dict())
+ _dict["jobs"] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowVersion from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "version": obj.get("version"),
+ "order": obj.get("order"),
+ "workflowId": obj.get("workflowId"),
+ "sticky": obj.get("sticky"),
+ "defaultPriority": obj.get("defaultPriority"),
+ "workflow": (
+ Workflow.from_dict(obj["workflow"])
+ if obj.get("workflow") is not None
+ else None
+ ),
+ "concurrency": (
+ WorkflowConcurrency.from_dict(obj["concurrency"])
+ if obj.get("concurrency") is not None
+ else None
+ ),
+ "triggers": (
+ WorkflowTriggers.from_dict(obj["triggers"])
+ if obj.get("triggers") is not None
+ else None
+ ),
+ "scheduleTimeout": obj.get("scheduleTimeout"),
+ "jobs": (
+ [Job.from_dict(_item) for _item in obj["jobs"]]
+ if obj.get("jobs") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version_concurrency.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version_concurrency.py
new file mode 100644
index 00000000..d9893920
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version_concurrency.py
@@ -0,0 +1,114 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, Field, StrictInt, StrictStr, field_validator
+from typing_extensions import Self
+
+
+class WorkflowVersionConcurrency(BaseModel):
+ """
+ WorkflowVersionConcurrency
+ """ # noqa: E501
+
+ max_runs: StrictInt = Field(
+ description="The maximum number of concurrent workflow runs.", alias="maxRuns"
+ )
+ limit_strategy: StrictStr = Field(
+ description="The strategy to use when the concurrency limit is reached.",
+ alias="limitStrategy",
+ )
+ get_concurrency_group: StrictStr = Field(
+ description="An action which gets the concurrency group for the WorkflowRun.",
+ alias="getConcurrencyGroup",
+ )
+ __properties: ClassVar[List[str]] = [
+ "maxRuns",
+ "limitStrategy",
+ "getConcurrencyGroup",
+ ]
+
+ @field_validator("limit_strategy")
+ def limit_strategy_validate_enum(cls, value):
+ """Validates the enum"""
+ if value not in set(
+ ["CANCEL_IN_PROGRESS", "DROP_NEWEST", "QUEUE_NEWEST", "GROUP_ROUND_ROBIN"]
+ ):
+ raise ValueError(
+ "must be one of enum values ('CANCEL_IN_PROGRESS', 'DROP_NEWEST', 'QUEUE_NEWEST', 'GROUP_ROUND_ROBIN')"
+ )
+ return value
+
+ model_config = {
+ "populate_by_name": True,
+ "validate_assignment": True,
+ "protected_namespaces": (),
+ }
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowVersionConcurrency from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowVersionConcurrency from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "maxRuns": obj.get("maxRuns"),
+ "limitStrategy": obj.get("limitStrategy"),
+ "getConcurrencyGroup": obj.get("getConcurrencyGroup"),
+ }
+ )
+ return _obj
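+
+
+# Usage sketch (illustrative only; the action name is a placeholder):
+# limit_strategy is checked against the known enum values, so an unknown
+# strategy fails at validation time.
+#
+#     WorkflowVersionConcurrency(
+#         maxRuns=5,
+#         limitStrategy="GROUP_ROUND_ROBIN",
+#         getConcurrencyGroup="default:get-concurrency-group",
+#     )
+#     # limitStrategy="SOMETHING_ELSE" would raise a pydantic ValidationError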
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version_definition.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version_definition.py
new file mode 100644
index 00000000..3f44c23a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version_definition.py
@@ -0,0 +1,85 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing_extensions import Self
+
+
+class WorkflowVersionDefinition(BaseModel):
+ """
+ WorkflowVersionDefinition
+ """ # noqa: E501
+
+ raw_definition: StrictStr = Field(
+ description="The raw YAML definition of the workflow.", alias="rawDefinition"
+ )
+ __properties: ClassVar[List[str]] = ["rawDefinition"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowVersionDefinition from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowVersionDefinition from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({"rawDefinition": obj.get("rawDefinition")})
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version_meta.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version_meta.py
new file mode 100644
index 00000000..be2c5672
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_version_meta.py
@@ -0,0 +1,123 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
+from typing_extensions import Self
+
+from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
+
+
+class WorkflowVersionMeta(BaseModel):
+ """
+ WorkflowVersionMeta
+ """ # noqa: E501
+
+ metadata: APIResourceMeta
+ version: StrictStr = Field(description="The version of the workflow.")
+ order: StrictInt
+ workflow_id: StrictStr = Field(alias="workflowId")
+ workflow: Optional[Workflow] = None
+ __properties: ClassVar[List[str]] = [
+ "metadata",
+ "version",
+ "order",
+ "workflowId",
+ "workflow",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowVersionMeta from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of metadata
+ if self.metadata:
+ _dict["metadata"] = self.metadata.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of workflow
+ if self.workflow:
+ _dict["workflow"] = self.workflow.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowVersionMeta from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "metadata": (
+ APIResourceMeta.from_dict(obj["metadata"])
+ if obj.get("metadata") is not None
+ else None
+ ),
+ "version": obj.get("version"),
+ "order": obj.get("order"),
+ "workflowId": obj.get("workflowId"),
+ "workflow": (
+ Workflow.from_dict(obj["workflow"])
+ if obj.get("workflow") is not None
+ else None
+ ),
+ }
+ )
+ return _obj
+
+
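+# Workflow is imported after the class definition to break the circular
+# reference between the two models; model_rebuild() below resolves the
+# deferred `Workflow` annotation once both classes are defined.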
+from hatchet_sdk.clients.rest.models.workflow import Workflow
+
+# TODO: Rewrite to not use raise_errors
+WorkflowVersionMeta.model_rebuild(raise_errors=False)
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_workers_count.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_workers_count.py
new file mode 100644
index 00000000..c42edbf5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/models/workflow_workers_count.py
@@ -0,0 +1,95 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+
+import json
+import pprint
+import re # noqa: F401
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
+from typing_extensions import Self
+
+
+class WorkflowWorkersCount(BaseModel):
+ """
+ WorkflowWorkersCount
+ """ # noqa: E501
+
+ free_slot_count: Optional[StrictInt] = Field(default=None, alias="freeSlotCount")
+ max_slot_count: Optional[StrictInt] = Field(default=None, alias="maxSlotCount")
+ workflow_run_id: Optional[StrictStr] = Field(default=None, alias="workflowRunId")
+ __properties: ClassVar[List[str]] = [
+ "freeSlotCount",
+ "maxSlotCount",
+ "workflowRunId",
+ ]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of WorkflowWorkersCount from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of WorkflowWorkersCount from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate(
+ {
+ "freeSlotCount": obj.get("freeSlotCount"),
+ "maxSlotCount": obj.get("maxSlotCount"),
+ "workflowRunId": obj.get("workflowRunId"),
+ }
+ )
+ return _obj
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/rest.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/rest.py
new file mode 100644
index 00000000..56286e14
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/rest.py
@@ -0,0 +1,187 @@
+# coding: utf-8
+
+"""
+ Hatchet API
+
+ The Hatchet API
+
+ The version of the OpenAPI document: 1.0.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import io
+import json
+import re
+import ssl
+from typing import Optional, Union
+
+import aiohttp
+import aiohttp_retry
+
+from hatchet_sdk.clients.rest.exceptions import ApiException, ApiValueError
+
+RESTResponseType = aiohttp.ClientResponse
+
+ALLOW_RETRY_METHODS = frozenset({"DELETE", "GET", "HEAD", "OPTIONS", "PUT", "TRACE"})
+
+
+class RESTResponse(io.IOBase):
+
+ def __init__(self, resp) -> None:
+ self.response = resp
+ self.status = resp.status
+ self.reason = resp.reason
+ self.data = None
+
+ async def read(self):
+ if self.data is None:
+ self.data = await self.response.read()
+ return self.data
+
+ def getheaders(self):
+ """Returns a CIMultiDictProxy of the response headers."""
+ return self.response.headers
+
+ def getheader(self, name, default=None):
+ """Returns a given response header."""
+ return self.response.headers.get(name, default)
+
+
+class RESTClientObject:
+
+ def __init__(self, configuration) -> None:
+
+ # maxsize is number of requests to host that are allowed in parallel
+ maxsize = configuration.connection_pool_maxsize
+
+ ssl_context = ssl.create_default_context(cafile=configuration.ssl_ca_cert)
+ if configuration.cert_file:
+ ssl_context.load_cert_chain(
+ configuration.cert_file, keyfile=configuration.key_file
+ )
+
+ if not configuration.verify_ssl:
+ ssl_context.check_hostname = False
+ ssl_context.verify_mode = ssl.CERT_NONE
+
+ connector = aiohttp.TCPConnector(limit=maxsize, ssl=ssl_context)
+
+ self.proxy = configuration.proxy
+ self.proxy_headers = configuration.proxy_headers
+
+ # https pool manager
+ self.pool_manager = aiohttp.ClientSession(connector=connector, trust_env=True)
+
+ retries = configuration.retries
+ self.retry_client: Optional[aiohttp_retry.RetryClient]
+ if retries is not None:
+ self.retry_client = aiohttp_retry.RetryClient(
+ client_session=self.pool_manager,
+ retry_options=aiohttp_retry.ExponentialRetry(
+ attempts=retries, factor=2.0, start_timeout=0.1, max_timeout=120.0
+ ),
+ )
+ else:
+ self.retry_client = None
+
+ async def close(self):
+ await self.pool_manager.close()
+ if self.retry_client is not None:
+ await self.retry_client.close()
+
+ async def request(
+ self,
+ method,
+ url,
+ headers=None,
+ body=None,
+ post_params=None,
+ _request_timeout=None,
+ ):
+ """Execute request
+
+ :param method: http request method
+ :param url: http request url
+ :param headers: http request headers
+ :param body: request json body, for `application/json`
+ :param post_params: request post parameters,
+ `application/x-www-form-urlencoded`
+ and `multipart/form-data`
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ """
+ method = method.upper()
+ assert method in ["GET", "HEAD", "DELETE", "POST", "PUT", "PATCH", "OPTIONS"]
+
+ if post_params and body:
+ raise ApiValueError(
+ "body parameter cannot be used with post_params parameter."
+ )
+
+ post_params = post_params or {}
+ headers = headers or {}
+ # url already contains the URL query string
+ timeout = _request_timeout or 5 * 60
+
+ if "Content-Type" not in headers:
+ headers["Content-Type"] = "application/json"
+
+ args = {"method": method, "url": url, "timeout": timeout, "headers": headers}
+
+ if self.proxy:
+ args["proxy"] = self.proxy
+ if self.proxy_headers:
+ args["proxy_headers"] = self.proxy_headers
+
+ # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
+ if method in ["POST", "PUT", "PATCH", "OPTIONS", "DELETE"]:
+ if re.search("json", headers["Content-Type"], re.IGNORECASE):
+ if body is not None:
+ body = json.dumps(body)
+ args["data"] = body
+ elif headers["Content-Type"] == "application/x-www-form-urlencoded":
+ args["data"] = aiohttp.FormData(post_params)
+ elif headers["Content-Type"] == "multipart/form-data":
+ # delete headers['Content-Type'] so that aiohttp can generate the
+ # correct multipart Content-Type header (including the boundary)
+ del headers["Content-Type"]
+ data = aiohttp.FormData()
+ for param in post_params:
+ k, v = param
+ if isinstance(v, tuple) and len(v) == 3:
+ data.add_field(k, value=v[1], filename=v[0], content_type=v[2])
+ else:
+ # Ensures that dict objects are serialized
+ if isinstance(v, dict):
+ v = json.dumps(v)
+ elif isinstance(v, int):
+ v = str(v)
+ data.add_field(k, v)
+ args["data"] = data
+
+ # Pass a `bytes` or `str` parameter directly in the body to support
+ # content types other than JSON when the `body` argument is provided
+ # in serialized form
+ elif isinstance(body, str) or isinstance(body, bytes):
+ args["data"] = body
+ else:
+ # Cannot generate the request from given parameters
+ msg = """Cannot prepare a request message for provided
+ arguments. Please check that your arguments match
+ declared content type."""
+ raise ApiException(status=0, reason=msg)
+
+ pool_manager: Union[aiohttp.ClientSession, aiohttp_retry.RetryClient]
+ if self.retry_client is not None and method in ALLOW_RETRY_METHODS:
+ pool_manager = self.retry_client
+ else:
+ pool_manager = self.pool_manager
+
+ r = await pool_manager.request(**args)
+
+ return RESTResponse(r)
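+
+
+# Usage sketch (illustrative only; host and path are placeholders): the
+# transport can be driven directly with a generated Configuration. Idempotent
+# methods go through the retry client when `retries` is configured.
+#
+#     import asyncio
+#     from hatchet_sdk.clients.rest.configuration import Configuration
+#
+#     async def ping() -> None:
+#         client = RESTClientObject(Configuration(host="http://localhost:8080"))
+#         resp = await client.request("GET", "http://localhost:8080/api/ready")
+#         print(resp.status, await resp.read())
+#         await client.close()
+#
+#     asyncio.run(ping())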
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/tenacity_utils.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/tenacity_utils.py
new file mode 100644
index 00000000..377266a1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest/tenacity_utils.py
@@ -0,0 +1,39 @@
+from typing import Callable, ParamSpec, TypeVar
+
+import grpc
+import tenacity
+
+from hatchet_sdk.logger import logger
+
+P = ParamSpec("P")
+R = TypeVar("R")
+
+
+def tenacity_retry(func: Callable[P, R]) -> Callable[P, R]:
+ return tenacity.retry(
+ reraise=True,
+ wait=tenacity.wait_exponential_jitter(),
+ stop=tenacity.stop_after_attempt(5),
+ before_sleep=tenacity_alert_retry,
+ retry=tenacity.retry_if_exception(tenacity_should_retry),
+ )(func)
+
+
+def tenacity_alert_retry(retry_state: tenacity.RetryCallState) -> None:
+ """Called between tenacity retries."""
+ logger.debug(
+ f"Retrying {retry_state.fn}: attempt "
+ f"{retry_state.attempt_number} ended with: {retry_state.outcome}",
+ )
+
+
+def tenacity_should_retry(ex: Exception) -> bool:
+ if isinstance(ex, (grpc.aio.AioRpcError, grpc.RpcError)):
+ if ex.code() in [
+ grpc.StatusCode.UNIMPLEMENTED,
+ grpc.StatusCode.NOT_FOUND,
+ ]:
+ return False
+ return True
+ else:
+ return False
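+
+
+# Usage sketch (illustrative only; `stub` and `GetWorker` are hypothetical):
+# wrapping a flaky gRPC call retries it with exponential jitter, but gives up
+# immediately on UNIMPLEMENTED or NOT_FOUND.
+#
+#     @tenacity_retry
+#     def fetch_worker(stub, request):
+#         return stub.GetWorker(request)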
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest_client.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest_client.py
new file mode 100644
index 00000000..f6458e5a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/rest_client.py
@@ -0,0 +1,611 @@
+import asyncio
+import atexit
+import datetime
+import threading
+from typing import Any
+
+from pydantic import StrictInt
+
+from hatchet_sdk.clients.rest.api.event_api import EventApi
+from hatchet_sdk.clients.rest.api.log_api import LogApi
+from hatchet_sdk.clients.rest.api.step_run_api import StepRunApi
+from hatchet_sdk.clients.rest.api.workflow_api import WorkflowApi
+from hatchet_sdk.clients.rest.api.workflow_run_api import WorkflowRunApi
+from hatchet_sdk.clients.rest.api.workflow_runs_api import WorkflowRunsApi
+from hatchet_sdk.clients.rest.api_client import ApiClient
+from hatchet_sdk.clients.rest.configuration import Configuration
+from hatchet_sdk.clients.rest.models import TriggerWorkflowRunRequest
+from hatchet_sdk.clients.rest.models.create_cron_workflow_trigger_request import (
+ CreateCronWorkflowTriggerRequest,
+)
+from hatchet_sdk.clients.rest.models.cron_workflows import CronWorkflows
+from hatchet_sdk.clients.rest.models.cron_workflows_order_by_field import (
+ CronWorkflowsOrderByField,
+)
+from hatchet_sdk.clients.rest.models.event_list import EventList
+from hatchet_sdk.clients.rest.models.event_order_by_direction import (
+ EventOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.event_order_by_field import EventOrderByField
+from hatchet_sdk.clients.rest.models.log_line_level import LogLineLevel
+from hatchet_sdk.clients.rest.models.log_line_list import LogLineList
+from hatchet_sdk.clients.rest.models.log_line_order_by_direction import (
+ LogLineOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.log_line_order_by_field import LogLineOrderByField
+from hatchet_sdk.clients.rest.models.replay_event_request import ReplayEventRequest
+from hatchet_sdk.clients.rest.models.replay_workflow_runs_request import (
+ ReplayWorkflowRunsRequest,
+)
+from hatchet_sdk.clients.rest.models.replay_workflow_runs_response import (
+ ReplayWorkflowRunsResponse,
+)
+from hatchet_sdk.clients.rest.models.schedule_workflow_run_request import (
+ ScheduleWorkflowRunRequest,
+)
+from hatchet_sdk.clients.rest.models.scheduled_workflows import ScheduledWorkflows
+from hatchet_sdk.clients.rest.models.scheduled_workflows_order_by_field import (
+ ScheduledWorkflowsOrderByField,
+)
+from hatchet_sdk.clients.rest.models.workflow import Workflow
+from hatchet_sdk.clients.rest.models.workflow_kind import WorkflowKind
+from hatchet_sdk.clients.rest.models.workflow_list import WorkflowList
+from hatchet_sdk.clients.rest.models.workflow_run import WorkflowRun
+from hatchet_sdk.clients.rest.models.workflow_run_cancel200_response import (
+ WorkflowRunCancel200Response,
+)
+from hatchet_sdk.clients.rest.models.workflow_run_list import WorkflowRunList
+from hatchet_sdk.clients.rest.models.workflow_run_order_by_direction import (
+ WorkflowRunOrderByDirection,
+)
+from hatchet_sdk.clients.rest.models.workflow_run_order_by_field import (
+ WorkflowRunOrderByField,
+)
+from hatchet_sdk.clients.rest.models.workflow_run_status import WorkflowRunStatus
+from hatchet_sdk.clients.rest.models.workflow_runs_cancel_request import (
+ WorkflowRunsCancelRequest,
+)
+from hatchet_sdk.clients.rest.models.workflow_version import WorkflowVersion
+
+
+class AsyncRestApi:
+ def __init__(self, host: str, api_key: str, tenant_id: str):
+ self.tenant_id = tenant_id
+
+ self.config = Configuration(
+ host=host,
+ access_token=api_key,
+ )
+
+ self._api_client = None
+ self._workflow_api = None
+ self._workflow_run_api = None
+ self._step_run_api = None
+ self._event_api = None
+ self._log_api = None
+
+ @property
+ def api_client(self):
+ if self._api_client is None:
+ self._api_client = ApiClient(configuration=self.config)
+ return self._api_client
+
+ @property
+ def workflow_api(self):
+ if self._workflow_api is None:
+ self._workflow_api = WorkflowApi(self.api_client)
+ return self._workflow_api
+
+ @property
+ def workflow_run_api(self):
+ if self._workflow_run_api is None:
+ self._workflow_run_api = WorkflowRunApi(self.api_client)
+ return self._workflow_run_api
+
+ @property
+ def step_run_api(self):
+ if self._step_run_api is None:
+ self._step_run_api = StepRunApi(self.api_client)
+ return self._step_run_api
+
+ @property
+ def event_api(self):
+ if self._event_api is None:
+ self._event_api = EventApi(self.api_client)
+ return self._event_api
+
+ @property
+ def log_api(self):
+ if self._log_api is None:
+ self._log_api = LogApi(self.api_client)
+ return self._log_api
+
+ async def close(self):
+ # Ensure the aiohttp client session is closed
+ if self._api_client is not None:
+ await self._api_client.close()
+
+ async def workflow_list(self) -> WorkflowList:
+ return await self.workflow_api.workflow_list(
+ tenant=self.tenant_id,
+ )
+
+ async def workflow_get(self, workflow_id: str) -> Workflow:
+ return await self.workflow_api.workflow_get(
+ workflow=workflow_id,
+ )
+
+ async def workflow_version_get(
+ self, workflow_id: str, version: str | None = None
+ ) -> WorkflowVersion:
+ return await self.workflow_api.workflow_version_get(
+ workflow=workflow_id,
+ version=version,
+ )
+
+ async def workflow_run_list(
+ self,
+ workflow_id: str | None = None,
+ offset: int | None = None,
+ limit: int | None = None,
+ event_id: str | None = None,
+ parent_workflow_run_id: str | None = None,
+ parent_step_run_id: str | None = None,
+ statuses: list[WorkflowRunStatus] | None = None,
+ kinds: list[WorkflowKind] | None = None,
+ additional_metadata: list[str] | None = None,
+ order_by_field: WorkflowRunOrderByField | None = None,
+ order_by_direction: WorkflowRunOrderByDirection | None = None,
+ ) -> WorkflowRunList:
+ return await self.workflow_api.workflow_run_list(
+ tenant=self.tenant_id,
+ offset=offset,
+ limit=limit,
+ workflow_id=workflow_id,
+ event_id=event_id,
+ parent_workflow_run_id=parent_workflow_run_id,
+ parent_step_run_id=parent_step_run_id,
+ statuses=statuses,
+ kinds=kinds,
+ additional_metadata=additional_metadata,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ )
+
+ async def workflow_run_get(self, workflow_run_id: str) -> WorkflowRun:
+ return await self.workflow_api.workflow_run_get(
+ tenant=self.tenant_id,
+ workflow_run=workflow_run_id,
+ )
+
+ async def workflow_run_replay(
+ self, workflow_run_ids: list[str]
+ ) -> ReplayWorkflowRunsResponse:
+ return await self.workflow_run_api.workflow_run_update_replay(
+ tenant=self.tenant_id,
+ replay_workflow_runs_request=ReplayWorkflowRunsRequest(
+ workflow_run_ids=workflow_run_ids,
+ ),
+ )
+
+ async def workflow_run_cancel(
+ self, workflow_run_id: str
+ ) -> WorkflowRunCancel200Response:
+ return await self.workflow_run_api.workflow_run_cancel(
+ tenant=self.tenant_id,
+ workflow_runs_cancel_request=WorkflowRunsCancelRequest(
+ workflowRunIds=[workflow_run_id],
+ ),
+ )
+
+ async def workflow_run_bulk_cancel(
+ self, workflow_run_ids: list[str]
+ ) -> WorkflowRunCancel200Response:
+ return await self.workflow_run_api.workflow_run_cancel(
+ tenant=self.tenant_id,
+ workflow_runs_cancel_request=WorkflowRunsCancelRequest(
+ workflowRunIds=workflow_run_ids,
+ ),
+ )
+
+ async def workflow_run_create(
+ self,
+ workflow_id: str,
+ input: dict[str, Any],
+ version: str | None = None,
+ additional_metadata: list[str] | None = None,
+ ) -> WorkflowRun:
+ return await self.workflow_run_api.workflow_run_create(
+ workflow=workflow_id,
+ version=version,
+ trigger_workflow_run_request=TriggerWorkflowRunRequest(
+ input=input,
+ additional_metadata=additional_metadata,
+ ),
+ )
+
+ async def cron_create(
+ self,
+ workflow_name: str,
+ cron_name: str,
+ expression: str,
+ input: dict[str, Any],
+ additional_metadata: dict[str, str],
+ ):
+ return await self.workflow_run_api.cron_workflow_trigger_create(
+ tenant=self.tenant_id,
+ workflow=workflow_name,
+ create_cron_workflow_trigger_request=CreateCronWorkflowTriggerRequest(
+ cronName=cron_name,
+ cronExpression=expression,
+ input=input,
+ additional_metadata=additional_metadata,
+ ),
+ )
+
+ async def cron_delete(self, cron_trigger_id: str):
+ return await self.workflow_api.workflow_cron_delete(
+ tenant=self.tenant_id,
+ cron_workflow=cron_trigger_id,
+ )
+
+ async def cron_list(
+ self,
+ offset: StrictInt | None = None,
+ limit: StrictInt | None = None,
+ workflow_id: str | None = None,
+ additional_metadata: list[str] | None = None,
+ order_by_field: CronWorkflowsOrderByField | None = None,
+ order_by_direction: WorkflowRunOrderByDirection | None = None,
+ ):
+ return await self.workflow_api.cron_workflow_list(
+ tenant=self.tenant_id,
+ offset=offset,
+ limit=limit,
+ workflow_id=workflow_id,
+ additional_metadata=additional_metadata,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ )
+
+ async def cron_get(self, cron_trigger_id: str):
+ return await self.workflow_api.workflow_cron_get(
+ tenant=self.tenant_id,
+ cron_workflow=cron_trigger_id,
+ )
+
+ async def schedule_create(
+ self,
+ name: str,
+ trigger_at: datetime.datetime,
+ input: dict[str, Any],
+ additional_metadata: dict[str, str],
+ ):
+ return await self.workflow_run_api.scheduled_workflow_run_create(
+ tenant=self.tenant_id,
+ workflow=name,
+ schedule_workflow_run_request=ScheduleWorkflowRunRequest(
+ triggerAt=trigger_at,
+ input=input,
+ additional_metadata=additional_metadata,
+ ),
+ )
+
+ async def schedule_delete(self, scheduled_trigger_id: str):
+ return await self.workflow_api.workflow_scheduled_delete(
+ tenant=self.tenant_id,
+ scheduled_workflow_run=scheduled_trigger_id,
+ )
+
+ async def schedule_list(
+ self,
+ offset: StrictInt | None = None,
+ limit: StrictInt | None = None,
+ workflow_id: str | None = None,
+ additional_metadata: list[str] | None = None,
+ parent_workflow_run_id: str | None = None,
+ parent_step_run_id: str | None = None,
+ order_by_field: ScheduledWorkflowsOrderByField | None = None,
+ order_by_direction: WorkflowRunOrderByDirection | None = None,
+ ):
+ return await self.workflow_api.workflow_scheduled_list(
+ tenant=self.tenant_id,
+ offset=offset,
+ limit=limit,
+ workflow_id=workflow_id,
+ parent_workflow_run_id=parent_workflow_run_id,
+ parent_step_run_id=parent_step_run_id,
+ additional_metadata=additional_metadata,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ )
+
+ async def schedule_get(self, scheduled_trigger_id: str):
+ return await self.workflow_api.workflow_scheduled_get(
+ tenant=self.tenant_id,
+ scheduled_workflow_run=scheduled_trigger_id,
+ )
+
+ async def list_logs(
+ self,
+ step_run_id: str,
+ offset: int | None = None,
+ limit: int | None = None,
+ levels: list[LogLineLevel] | None = None,
+ search: str | None = None,
+ order_by_field: LogLineOrderByField | None = None,
+ order_by_direction: LogLineOrderByDirection | None = None,
+ ) -> LogLineList:
+ return await self.log_api.log_line_list(
+ step_run=step_run_id,
+ offset=offset,
+ limit=limit,
+ levels=levels,
+ search=search,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ )
+
+ async def events_list(
+ self,
+ offset: int | None = None,
+ limit: int | None = None,
+ keys: list[str] | None = None,
+ workflows: list[str] | None = None,
+ statuses: list[WorkflowRunStatus] | None = None,
+ search: str | None = None,
+ order_by_field: EventOrderByField | None = None,
+ order_by_direction: EventOrderByDirection | None = None,
+ additional_metadata: list[str] | None = None,
+ ) -> EventList:
+ return await self.event_api.event_list(
+ tenant=self.tenant_id,
+ offset=offset,
+ limit=limit,
+ keys=keys,
+ workflows=workflows,
+ statuses=statuses,
+ search=search,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ additional_metadata=additional_metadata,
+ )
+
+ async def events_replay(self, event_ids: list[str] | EventList) -> EventList:
+ if isinstance(event_ids, EventList):
+ event_ids = [r.metadata.id for r in event_ids.rows]
+
+ return await self.event_api.event_update_replay(
+ tenant=self.tenant_id,
+ replay_event_request=ReplayEventRequest(eventIds=event_ids),
+ )
+
+
+class RestApi:
+ def __init__(self, host: str, api_key: str, tenant_id: str):
+ self._loop = asyncio.new_event_loop()
+ self._thread = threading.Thread(target=self._run_event_loop, daemon=True)
+ self._thread.start()
+
+ # Initialize AsyncRestApi here; its aiohttp session is created lazily, so it
+ # is only built once a coroutine actually runs on the background event loop
+ self.aio = AsyncRestApi(host, api_key, tenant_id)
+
+ # Register the cleanup method to be called on exit
+ atexit.register(self._cleanup)
+
+ def _cleanup(self):
+ """
+ Stop the running thread and clean up the event loop.
+ """
+ self._run_coroutine(self.aio.close())
+ self._loop.call_soon_threadsafe(self._loop.stop)
+ self._thread.join()
+
+ def _run_event_loop(self):
+ """
+ Run the asyncio event loop in a separate thread.
+ """
+ asyncio.set_event_loop(self._loop)
+ self._loop.run_forever()
+
+ def _run_coroutine(self, coro) -> Any:
+ """
+ Execute a coroutine in the event loop and return the result.
+ """
+ future = asyncio.run_coroutine_threadsafe(coro, self._loop)
+ return future.result()
+
+ def workflow_list(self) -> WorkflowList:
+ return self._run_coroutine(self.aio.workflow_list())
+
+ def workflow_get(self, workflow_id: str) -> Workflow:
+ return self._run_coroutine(self.aio.workflow_get(workflow_id))
+
+ def workflow_version_get(
+ self, workflow_id: str, version: str | None = None
+ ) -> WorkflowVersion:
+ return self._run_coroutine(self.aio.workflow_version_get(workflow_id, version))
+
+ def workflow_run_list(
+ self,
+ workflow_id: str | None = None,
+ offset: int | None = None,
+ limit: int | None = None,
+ event_id: str | None = None,
+ parent_workflow_run_id: str | None = None,
+ parent_step_run_id: str | None = None,
+ statuses: list[WorkflowRunStatus] | None = None,
+ kinds: list[WorkflowKind] | None = None,
+ additional_metadata: list[str] | None = None,
+ order_by_field: WorkflowRunOrderByField | None = None,
+ order_by_direction: WorkflowRunOrderByDirection | None = None,
+ ) -> WorkflowRunList:
+ return self._run_coroutine(
+ self.aio.workflow_run_list(
+ workflow_id=workflow_id,
+ offset=offset,
+ limit=limit,
+ event_id=event_id,
+ parent_workflow_run_id=parent_workflow_run_id,
+ parent_step_run_id=parent_step_run_id,
+ statuses=statuses,
+ kinds=kinds,
+ additional_metadata=additional_metadata,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ )
+ )
+
+ def workflow_run_get(self, workflow_run_id: str) -> WorkflowRun:
+ return self._run_coroutine(self.aio.workflow_run_get(workflow_run_id))
+
+ def workflow_run_cancel(self, workflow_run_id: str) -> WorkflowRunCancel200Response:
+ return self._run_coroutine(self.aio.workflow_run_cancel(workflow_run_id))
+
+ def workflow_run_bulk_cancel(
+ self, workflow_run_ids: list[str]
+ ) -> WorkflowRunCancel200Response:
+ return self._run_coroutine(self.aio.workflow_run_bulk_cancel(workflow_run_ids))
+
+ def workflow_run_create(
+ self,
+ workflow_id: str,
+ input: dict[str, Any],
+ version: str | None = None,
+ additional_metadata: list[str] | None = None,
+ ) -> WorkflowRun:
+ return self._run_coroutine(
+ self.aio.workflow_run_create(
+ workflow_id, input, version, additional_metadata
+ )
+ )
+
+ def cron_create(
+ self,
+ workflow_name: str,
+ cron_name: str,
+ expression: str,
+ input: dict[str, Any],
+ additional_metadata: dict[str, str],
+ ) -> CronWorkflows:
+ return self._run_coroutine(
+ self.aio.cron_create(
+ workflow_name, cron_name, expression, input, additional_metadata
+ )
+ )
+
+ def cron_delete(self, cron_trigger_id: str):
+ return self._run_coroutine(self.aio.cron_delete(cron_trigger_id))
+
+ def cron_list(
+ self,
+ offset: int | None = None,
+ limit: int | None = None,
+ workflow_id: str | None = None,
+ additional_metadata: list[str] | None = None,
+ order_by_field: CronWorkflowsOrderByField | None = None,
+ order_by_direction: WorkflowRunOrderByDirection | None = None,
+ ):
+ return self._run_coroutine(
+ self.aio.cron_list(
+ offset,
+ limit,
+ workflow_id,
+ additional_metadata,
+ order_by_field,
+ order_by_direction,
+ )
+ )
+
+ def cron_get(self, cron_trigger_id: str):
+ return self._run_coroutine(self.aio.cron_get(cron_trigger_id))
+
+ def schedule_create(
+ self,
+ workflow_name: str,
+ trigger_at: datetime.datetime,
+ input: dict[str, Any],
+ additional_metadata: dict[str, str],
+ ):
+ return self._run_coroutine(
+ self.aio.schedule_create(
+ workflow_name, trigger_at, input, additional_metadata
+ )
+ )
+
+ def schedule_delete(self, scheduled_trigger_id: str):
+ return self._run_coroutine(self.aio.schedule_delete(scheduled_trigger_id))
+
+ def schedule_list(
+ self,
+ offset: int | None = None,
+ limit: int | None = None,
+ workflow_id: str | None = None,
+ additional_metadata: list[str] | None = None,
+ order_by_field: CronWorkflowsOrderByField | None = None,
+ order_by_direction: WorkflowRunOrderByDirection | None = None,
+ ):
+ return self._run_coroutine(
+ self.aio.schedule_list(
+ offset,
+ limit,
+ workflow_id,
+ additional_metadata,
+ order_by_field,
+ order_by_direction,
+ )
+ )
+
+ def schedule_get(self, scheduled_trigger_id: str):
+ return self._run_coroutine(self.aio.schedule_get(scheduled_trigger_id))
+
+ def list_logs(
+ self,
+ step_run_id: str,
+ offset: int | None = None,
+ limit: int | None = None,
+ levels: list[LogLineLevel] | None = None,
+ search: str | None = None,
+ order_by_field: LogLineOrderByField | None = None,
+ order_by_direction: LogLineOrderByDirection | None = None,
+ ) -> LogLineList:
+ return self._run_coroutine(
+ self.aio.list_logs(
+ step_run_id=step_run_id,
+ offset=offset,
+ limit=limit,
+ levels=levels,
+ search=search,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ )
+ )
+
+ def events_list(
+ self,
+ offset: int | None = None,
+ limit: int | None = None,
+ keys: list[str] | None = None,
+ workflows: list[str] | None = None,
+ statuses: list[WorkflowRunStatus] | None = None,
+ search: str | None = None,
+ order_by_field: EventOrderByField | None = None,
+ order_by_direction: EventOrderByDirection | None = None,
+ additional_metadata: list[str] | None = None,
+ ) -> EventList:
+ return self._run_coroutine(
+ self.aio.events_list(
+ offset=offset,
+ limit=limit,
+ keys=keys,
+ workflows=workflows,
+ statuses=statuses,
+ search=search,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ additional_metadata=additional_metadata,
+ )
+ )
+
+ def events_replay(self, event_ids: list[str] | EventList) -> EventList:
+ return self._run_coroutine(self.aio.events_replay(event_ids))
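+
+
+# Usage sketch (illustrative only; host, token, and tenant are placeholders):
+# RestApi mirrors AsyncRestApi by running each coroutine on a background event
+# loop, while async code can use the same client via `rest.aio`.
+#
+#     rest = RestApi("https://app.example.com", "<api-token>", "<tenant-id>")
+#     workflows = rest.workflow_list()
+#     runs = rest.workflow_run_list(limit=10)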
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/run_event_listener.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/run_event_listener.py
new file mode 100644
index 00000000..b5db6a74
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/run_event_listener.py
@@ -0,0 +1,260 @@
+import asyncio
+import json
+from typing import AsyncGenerator, Callable
+
+import grpc
+
+from hatchet_sdk.connection import new_conn
+from hatchet_sdk.contracts.dispatcher_pb2 import (
+ RESOURCE_TYPE_STEP_RUN,
+ RESOURCE_TYPE_WORKFLOW_RUN,
+ ResourceEventType,
+ SubscribeToWorkflowEventsRequest,
+ WorkflowEvent,
+)
+from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub
+
+from ..loader import ClientConfig
+from ..metadata import get_metadata
+
+DEFAULT_ACTION_LISTENER_RETRY_INTERVAL = 5 # seconds
+DEFAULT_ACTION_LISTENER_RETRY_COUNT = 5
+
+
+class StepRunEventType:
+ STEP_RUN_EVENT_TYPE_STARTED = "STEP_RUN_EVENT_TYPE_STARTED"
+ STEP_RUN_EVENT_TYPE_COMPLETED = "STEP_RUN_EVENT_TYPE_COMPLETED"
+ STEP_RUN_EVENT_TYPE_FAILED = "STEP_RUN_EVENT_TYPE_FAILED"
+ STEP_RUN_EVENT_TYPE_CANCELLED = "STEP_RUN_EVENT_TYPE_CANCELLED"
+ STEP_RUN_EVENT_TYPE_TIMED_OUT = "STEP_RUN_EVENT_TYPE_TIMED_OUT"
+ STEP_RUN_EVENT_TYPE_STREAM = "STEP_RUN_EVENT_TYPE_STREAM"
+
+
+class WorkflowRunEventType:
+ WORKFLOW_RUN_EVENT_TYPE_STARTED = "WORKFLOW_RUN_EVENT_TYPE_STARTED"
+ WORKFLOW_RUN_EVENT_TYPE_COMPLETED = "WORKFLOW_RUN_EVENT_TYPE_COMPLETED"
+ WORKFLOW_RUN_EVENT_TYPE_FAILED = "WORKFLOW_RUN_EVENT_TYPE_FAILED"
+ WORKFLOW_RUN_EVENT_TYPE_CANCELLED = "WORKFLOW_RUN_EVENT_TYPE_CANCELLED"
+ WORKFLOW_RUN_EVENT_TYPE_TIMED_OUT = "WORKFLOW_RUN_EVENT_TYPE_TIMED_OUT"
+
+
+step_run_event_type_mapping = {
+ ResourceEventType.RESOURCE_EVENT_TYPE_STARTED: StepRunEventType.STEP_RUN_EVENT_TYPE_STARTED,
+ ResourceEventType.RESOURCE_EVENT_TYPE_COMPLETED: StepRunEventType.STEP_RUN_EVENT_TYPE_COMPLETED,
+ ResourceEventType.RESOURCE_EVENT_TYPE_FAILED: StepRunEventType.STEP_RUN_EVENT_TYPE_FAILED,
+ ResourceEventType.RESOURCE_EVENT_TYPE_CANCELLED: StepRunEventType.STEP_RUN_EVENT_TYPE_CANCELLED,
+ ResourceEventType.RESOURCE_EVENT_TYPE_TIMED_OUT: StepRunEventType.STEP_RUN_EVENT_TYPE_TIMED_OUT,
+ ResourceEventType.RESOURCE_EVENT_TYPE_STREAM: StepRunEventType.STEP_RUN_EVENT_TYPE_STREAM,
+}
+
+workflow_run_event_type_mapping = {
+ ResourceEventType.RESOURCE_EVENT_TYPE_STARTED: WorkflowRunEventType.WORKFLOW_RUN_EVENT_TYPE_STARTED,
+ ResourceEventType.RESOURCE_EVENT_TYPE_COMPLETED: WorkflowRunEventType.WORKFLOW_RUN_EVENT_TYPE_COMPLETED,
+ ResourceEventType.RESOURCE_EVENT_TYPE_FAILED: WorkflowRunEventType.WORKFLOW_RUN_EVENT_TYPE_FAILED,
+ ResourceEventType.RESOURCE_EVENT_TYPE_CANCELLED: WorkflowRunEventType.WORKFLOW_RUN_EVENT_TYPE_CANCELLED,
+ ResourceEventType.RESOURCE_EVENT_TYPE_TIMED_OUT: WorkflowRunEventType.WORKFLOW_RUN_EVENT_TYPE_TIMED_OUT,
+}
+
+
+class StepRunEvent:
+ def __init__(self, type: StepRunEventType, payload: str):
+ self.type = type
+ self.payload = payload
+
+
+def new_listener(config: ClientConfig):
+ return RunEventListenerClient(config=config)
+
+
+class RunEventListener:
+
+ workflow_run_id: str | None = None
+ additional_meta_kv: tuple[str, str] | None = None
+
+ def __init__(self, client: DispatcherStub, token: str):
+ self.client = client
+ self.stop_signal = False
+ self.token = token
+
+ @classmethod
+ def for_run_id(cls, workflow_run_id: str, client: DispatcherStub, token: str):
+ listener = RunEventListener(client, token)
+ listener.workflow_run_id = workflow_run_id
+ return listener
+
+ @classmethod
+ def for_additional_meta(
+ cls, key: str, value: str, client: DispatcherStub, token: str
+ ):
+ listener = RunEventListener(client, token)
+ listener.additional_meta_kv = (key, value)
+ return listener
+
+ def abort(self):
+ self.stop_signal = True
+
+ def __aiter__(self):
+ return self._generator()
+
+ async def __anext__(self):
+ return await self._generator().__anext__()
+
+ def __iter__(self):
+ try:
+ loop = asyncio.get_event_loop()
+ except RuntimeError as e:
+ if str(e).startswith("There is no current event loop in thread"):
+ loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(loop)
+ else:
+ raise e
+
+ async_iter = self.__aiter__()
+
+ while True:
+ try:
+ future = asyncio.ensure_future(async_iter.__anext__())
+ yield loop.run_until_complete(future)
+ except StopAsyncIteration:
+ break
+ except Exception as e:
+ print(f"Error in synchronous iterator: {e}")
+ break
+
+ async def _generator(self) -> AsyncGenerator[StepRunEvent, None]:
+ while True:
+ if self.stop_signal:
+ listener = None
+ break
+
+ listener = await self.retry_subscribe()
+
+ try:
+ async for workflow_event in listener:
+ eventType = None
+ if workflow_event.resourceType == RESOURCE_TYPE_STEP_RUN:
+ if workflow_event.eventType in step_run_event_type_mapping:
+ eventType = step_run_event_type_mapping[
+ workflow_event.eventType
+ ]
+ else:
+ raise Exception(
+ f"Unknown event type: {workflow_event.eventType}"
+ )
+ payload = None
+
+ try:
+ if workflow_event.eventPayload:
+ payload = json.loads(workflow_event.eventPayload)
+ except Exception:
+ payload = workflow_event.eventPayload
+
+ yield StepRunEvent(type=eventType, payload=payload)
+ elif workflow_event.resourceType == RESOURCE_TYPE_WORKFLOW_RUN:
+ if workflow_event.eventType in workflow_run_event_type_mapping:
+ eventType = workflow_run_event_type_mapping[
+ workflow_event.eventType
+ ]
+ else:
+ raise Exception(
+ f"Unknown event type: {workflow_event.eventType}"
+ )
+
+ payload = None
+
+ try:
+ if workflow_event.eventPayload:
+ payload = json.loads(workflow_event.eventPayload)
+ except Exception:
+ pass
+
+ yield StepRunEvent(type=eventType, payload=payload)
+
+ if workflow_event.hangup:
+ listener = None
+ break
+
+ break
+ except grpc.RpcError as e:
+ # Handle different types of errors
+ if e.code() == grpc.StatusCode.CANCELLED:
+ # Context cancelled, unsubscribe and close
+ break
+ elif e.code() == grpc.StatusCode.UNAVAILABLE:
+ # Retry logic
+ # logger.info("Could not connect to Hatchet, retrying...")
+ listener = await self.retry_subscribe()
+ elif e.code() == grpc.StatusCode.DEADLINE_EXCEEDED:
+ # logger.info("Deadline exceeded, retrying subscription")
+ continue
+ else:
+ # Unknown error, report and break
+ # logger.error(f"Failed to receive message: {e}")
+ break
+ # exiting the while loop ends the generator, which surfaces to the caller as StopAsyncIteration
+
+ async def retry_subscribe(self):
+ retries = 0
+
+ while retries < DEFAULT_ACTION_LISTENER_RETRY_COUNT:
+ try:
+ if retries > 0:
+ await asyncio.sleep(DEFAULT_ACTION_LISTENER_RETRY_INTERVAL)
+
+ if self.workflow_run_id is not None:
+ return self.client.SubscribeToWorkflowEvents(
+ SubscribeToWorkflowEventsRequest(
+ workflowRunId=self.workflow_run_id,
+ ),
+ metadata=get_metadata(self.token),
+ )
+ elif self.additional_meta_kv is not None:
+ return self.client.SubscribeToWorkflowEvents(
+ SubscribeToWorkflowEventsRequest(
+ additionalMetaKey=self.additional_meta_kv[0],
+ additionalMetaValue=self.additional_meta_kv[1],
+ ),
+ metadata=get_metadata(self.token),
+ )
+ else:
+ raise Exception("no listener method provided")
+
+ except grpc.RpcError as e:
+ if e.code() == grpc.StatusCode.UNAVAILABLE:
+ retries = retries + 1
+ else:
+ raise ValueError(f"gRPC error: {e}")
+
+ raise Exception(
+ f"could not subscribe to workflow events after {DEFAULT_ACTION_LISTENER_RETRY_COUNT} retries"
+ )
+
+
+class RunEventListenerClient:
+ def __init__(self, config: ClientConfig):
+ self.token = config.token
+ self.config = config
+ self.client: DispatcherStub | None = None
+
+ def stream_by_run_id(self, workflow_run_id: str):
+ return self.stream(workflow_run_id)
+
+ def stream(self, workflow_run_id: str):
+ if not isinstance(workflow_run_id, str):
+ workflow_run_id = str(workflow_run_id)
+
+ if not self.client:
+ aio_conn = new_conn(self.config, True)
+ self.client = DispatcherStub(aio_conn)
+
+ return RunEventListener.for_run_id(workflow_run_id, self.client, self.token)
+
+ def stream_by_additional_metadata(self, key: str, value: str):
+ if not self.client:
+ aio_conn = new_conn(self.config, True)
+ self.client = DispatcherStub(aio_conn)
+
+ return RunEventListener.for_additional_meta(key, value, self.client, self.token)
+
+ async def on(self, workflow_run_id: str, handler: callable = None):
+ async for event in self.stream(workflow_run_id):
+ # call the handler if provided
+ if handler:
+ handler(event)
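
A minimal consumption sketch for the run event listener above, assuming a loaded ClientConfig instance (here called `config`) and a known workflow run id; the module path, `new_listener`, `stream()`, and the StepRunEvent fields are the ones used in this diff, everything else is illustrative.

    import asyncio

    from hatchet_sdk.clients.run_event_listener import new_listener


    async def tail_run(config, workflow_run_id: str) -> None:
        # new_listener() wraps the config in a RunEventListenerClient
        client = new_listener(config)

        # stream() returns a RunEventListener bound to one workflow run id;
        # iterating it yields StepRunEvent objects with a type and a payload
        async for event in client.stream(workflow_run_id):
            print(event.type, event.payload)


    # asyncio.run(tail_run(config, "<workflow-run-id>"))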
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/workflow_listener.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/workflow_listener.py
new file mode 100644
index 00000000..8bf71a3c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/clients/workflow_listener.py
@@ -0,0 +1,277 @@
+import asyncio
+import json
+from collections.abc import AsyncIterator
+from typing import AsyncGenerator
+
+import grpc
+from grpc._cython import cygrpc
+
+from hatchet_sdk.clients.event_ts import Event_ts, read_with_interrupt
+from hatchet_sdk.connection import new_conn
+from hatchet_sdk.contracts.dispatcher_pb2 import (
+ SubscribeToWorkflowRunsRequest,
+ WorkflowRunEvent,
+)
+from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub
+
+from ..loader import ClientConfig
+from ..logger import logger
+from ..metadata import get_metadata
+
+DEFAULT_WORKFLOW_LISTENER_RETRY_INTERVAL = 3 # seconds
+DEFAULT_WORKFLOW_LISTENER_RETRY_COUNT = 5
+DEFAULT_WORKFLOW_LISTENER_INTERRUPT_INTERVAL = 1800 # 30 minutes
+
+DEDUPE_MESSAGE = "DUPLICATE_WORKFLOW_RUN"
+
+
+class _Subscription:
+ def __init__(self, id: int, workflow_run_id: str):
+ self.id = id
+ self.workflow_run_id = workflow_run_id
+ self.queue: asyncio.Queue[WorkflowRunEvent | None] = asyncio.Queue()
+
+ def __aiter__(self):
+ return self
+
+ async def __anext__(self) -> WorkflowRunEvent:
+ # delegate to get() so the None close sentinel terminates iteration
+ return await self.get()
+
+ async def get(self) -> WorkflowRunEvent:
+ event = await self.queue.get()
+
+ if event is None:
+ raise StopAsyncIteration
+
+ return event
+
+ async def put(self, item: WorkflowRunEvent):
+ await self.queue.put(item)
+
+ async def close(self):
+ await self.queue.put(None)
+
+
+class PooledWorkflowRunListener:
+ # all active subscriptions, mapping a subscription id to its workflow run id
+ subscriptionsToWorkflows: dict[int, str] = {}
+
+ # reverse index, mapping a workflow run id to the subscription ids watching it
+ workflowsToSubscriptions: dict[str, list[int]] = {}
+
+ subscription_counter: int = 0
+ subscription_counter_lock: asyncio.Lock = asyncio.Lock()
+
+ requests: asyncio.Queue[SubscribeToWorkflowRunsRequest] = asyncio.Queue()
+
+ listener: AsyncGenerator[WorkflowRunEvent, None] | None = None
+ listener_task: asyncio.Task | None = None
+
+ curr_requester: int = 0
+
+ # active subscriptions, keyed by subscription id
+ events: dict[int, _Subscription] = {}
+
+ interrupter: asyncio.Task | None = None
+ interrupt: Event_ts | None = None
+
+ def __init__(self, config: ClientConfig):
+ try:
+ asyncio.get_running_loop()
+ except RuntimeError:
+ loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(loop)
+
+ conn = new_conn(config, True)
+ self.client = DispatcherStub(conn)
+ self.token = config.token
+ self.config = config
+
+ async def _interrupter(self):
+ """
+ _interrupter runs in a separate thread and interrupts the listener according to a configurable duration.
+ """
+ await asyncio.sleep(DEFAULT_WORKFLOW_LISTENER_INTERRUPT_INTERVAL)
+
+ if self.interrupt is not None:
+ self.interrupt.set()
+
+ async def _init_producer(self):
+ try:
+ if not self.listener:
+ while True:
+ try:
+ self.listener = await self._retry_subscribe()
+
+ logger.debug("Workflow run listener connected.")
+
+ # spawn an interrupter task
+ if self.interrupter is not None and not self.interrupter.done():
+ self.interrupter.cancel()
+
+ self.interrupter = asyncio.create_task(self._interrupter())
+
+ while True:
+ self.interrupt = Event_ts()
+ t = asyncio.create_task(
+ read_with_interrupt(self.listener, self.interrupt)
+ )
+ await self.interrupt.wait()
+
+ if not t.done():
+ # print a warning
+ logger.warning(
+ "Interrupted read_with_interrupt task of workflow run listener"
+ )
+
+ t.cancel()
+ self.listener.cancel()
+ await asyncio.sleep(
+ DEFAULT_WORKFLOW_LISTENER_RETRY_INTERVAL
+ )
+ break
+
+ workflow_event: WorkflowRunEvent = t.result()
+
+ if workflow_event is cygrpc.EOF:
+ break
+
+ # get a list of subscriptions for this workflow
+ subscriptions = self.workflowsToSubscriptions.get(
+ workflow_event.workflowRunId, []
+ )
+
+ for subscription_id in subscriptions:
+ await self.events[subscription_id].put(workflow_event)
+
+ except grpc.RpcError as e:
+ logger.debug(f"grpc error in workflow run listener: {e}")
+ await asyncio.sleep(DEFAULT_WORKFLOW_LISTENER_RETRY_INTERVAL)
+ continue
+
+ except Exception as e:
+ logger.error(f"Error in workflow run listener: {e}")
+
+ self.listener = None
+
+ # close all subscriptions
+ for subscription_id in self.events:
+ await self.events[subscription_id].close()
+
+ raise e
+
+ async def _request(self) -> AsyncIterator[SubscribeToWorkflowRunsRequest]:
+ self.curr_requester = self.curr_requester + 1
+
+ # replay all existing subscriptions
+ workflow_run_set = set(self.subscriptionsToWorkflows.values())
+
+ for workflow_run_id in workflow_run_set:
+ yield SubscribeToWorkflowRunsRequest(
+ workflowRunId=workflow_run_id,
+ )
+
+ while True:
+ request = await self.requests.get()
+
+ # if the request is an int which matches the current requester, then we should stop
+ if request == self.curr_requester:
+ break
+
+ # if we've gotten an int that doesn't match the current requester, then we should ignore it
+ if isinstance(request, int):
+ continue
+
+ yield request
+ self.requests.task_done()
+
+ def cleanup_subscription(self, subscription_id: int):
+ workflow_run_id = self.subscriptionsToWorkflows[subscription_id]
+
+ if workflow_run_id in self.workflowsToSubscriptions:
+ self.workflowsToSubscriptions[workflow_run_id].remove(subscription_id)
+
+ del self.subscriptionsToWorkflows[subscription_id]
+ del self.events[subscription_id]
+
+ async def subscribe(self, workflow_run_id: str):
+ try:
+ # create a new subscription id while holding the counter lock
+ async with self.subscription_counter_lock:
+ self.subscription_counter += 1
+ subscription_id = self.subscription_counter
+
+ self.subscriptionsToWorkflows[subscription_id] = workflow_run_id
+
+ if workflow_run_id not in self.workflowsToSubscriptions:
+ self.workflowsToSubscriptions[workflow_run_id] = [subscription_id]
+ else:
+ self.workflowsToSubscriptions[workflow_run_id].append(subscription_id)
+
+ self.events[subscription_id] = _Subscription(
+ subscription_id, workflow_run_id
+ )
+
+ await self.requests.put(
+ SubscribeToWorkflowRunsRequest(
+ workflowRunId=workflow_run_id,
+ )
+ )
+
+ if not self.listener_task or self.listener_task.done():
+ self.listener_task = asyncio.create_task(self._init_producer())
+
+ event = await self.events[subscription_id].get()
+
+ return event
+ except asyncio.CancelledError:
+ raise
+ finally:
+ self.cleanup_subscription(subscription_id)
+
+ async def result(self, workflow_run_id: str):
+ from hatchet_sdk.clients.admin import DedupeViolationErr
+
+ event = await self.subscribe(workflow_run_id)
+
+ errors = []
+
+ if event.results:
+ errors = [result.error for result in event.results if result.error]
+
+ if errors:
+ if DEDUPE_MESSAGE in errors[0]:
+ raise DedupeViolationErr(errors[0])
+ else:
+ raise Exception(f"Workflow Errors: {errors}")
+
+ results = {
+ result.stepReadableId: json.loads(result.output)
+ for result in event.results
+ if result.output
+ }
+
+ return results
+
+ async def _retry_subscribe(self):
+ retries = 0
+
+ while retries < DEFAULT_WORKFLOW_LISTENER_RETRY_COUNT:
+ try:
+ if retries > 0:
+ await asyncio.sleep(DEFAULT_WORKFLOW_LISTENER_RETRY_INTERVAL)
+
+ # signal previous async iterator to stop
+ if self.curr_requester != 0:
+ self.requests.put_nowait(self.curr_requester)
+
+ return self.client.SubscribeToWorkflowRuns(
+ self._request(),
+ metadata=get_metadata(self.token),
+ )
+ except grpc.RpcError as e:
+ if e.code() == grpc.StatusCode.UNAVAILABLE:
+ retries = retries + 1
+ else:
+ raise ValueError(f"gRPC error: {e}")
+
+ raise Exception(
+ f"could not subscribe to workflow runs after {DEFAULT_WORKFLOW_LISTENER_RETRY_COUNT} retries"
+ )
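
A hedged sketch of how the pooled listener above can be awaited for a single run's results; `config` is assumed to come from the SDK loader and the run id from a prior trigger.

    import asyncio

    from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener


    async def wait_for_result(config, workflow_run_id: str) -> dict:
        listener = PooledWorkflowRunListener(config)

        # result() subscribes to the run, raises DedupeViolationErr for duplicate runs
        # (or a generic Exception for step errors), and returns a mapping of
        # stepReadableId to the step's JSON-decoded output
        return await listener.result(workflow_run_id)


    # outputs = asyncio.run(wait_for_result(config, "<workflow-run-id>"))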
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/connection.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/connection.py
new file mode 100644
index 00000000..185395e4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/connection.py
@@ -0,0 +1,64 @@
+import os
+from typing import TYPE_CHECKING, Any
+
+import grpc
+
+if TYPE_CHECKING:
+ from hatchet_sdk.loader import ClientConfig
+
+
+def new_conn(config: "ClientConfig", aio=False):
+
+ credentials: grpc.ChannelCredentials | None = None
+
+ # load channel credentials
+ if config.tls_config.tls_strategy == "tls":
+ root: Any | None = None
+
+ if config.tls_config.ca_file:
+ with open(config.tls_config.ca_file, "rb") as f:
+ root = f.read()
+
+ credentials = grpc.ssl_channel_credentials(root_certificates=root)
+ elif config.tls_config.tls_strategy == "mtls":
+ with open(config.tls_config.ca_file, "rb") as f:
+ root = f.read()
+ with open(config.tls_config.key_file, "rb") as f:
+ private_key = f.read()
+ with open(config.tls_config.cert_file, "rb") as f:
+ certificate_chain = f.read()
+
+ credentials = grpc.ssl_channel_credentials(
+ root_certificates=root,
+ private_key=private_key,
+ certificate_chain=certificate_chain,
+ )
+
+ start = grpc if not aio else grpc.aio
+
+ channel_options = [
+ ("grpc.max_send_message_length", config.grpc_max_send_message_length),
+ ("grpc.max_receive_message_length", config.grpc_max_recv_message_length),
+ ("grpc.keepalive_time_ms", 10 * 1000),
+ ("grpc.keepalive_timeout_ms", 60 * 1000),
+ ("grpc.client_idle_timeout_ms", 60 * 1000),
+ ("grpc.http2.max_pings_without_data", 0),
+ ("grpc.keepalive_permit_without_calls", 1),
+ ]
+
+ # Set environment variable to disable fork support. Reference: https://github.com/grpc/grpc/issues/28557
+ # When steps execute via os.fork, we see `TSI_DATA_CORRUPTED` errors.
+ os.environ["GRPC_ENABLE_FORK_SUPPORT"] = "False"
+
+ if config.tls_config.tls_strategy == "none":
+ conn = start.insecure_channel(
+ target=config.host_port,
+ options=channel_options,
+ )
+ else:
+ channel_options.append(
+ ("grpc.ssl_target_name_override", config.tls_config.server_name)
+ )
+
+ conn = start.secure_channel(
+ target=config.host_port,
+ credentials=credentials,
+ options=channel_options,
+ )
+ return conn
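
For reference, a short sketch of how this channel factory is consumed by the listener clients elsewhere in this diff; nothing beyond new_conn and the generated DispatcherStub is assumed.

    from hatchet_sdk.connection import new_conn
    from hatchet_sdk.contracts.dispatcher_pb2_grpc import DispatcherStub


    def build_dispatcher_stub(config, aio: bool = True) -> DispatcherStub:
        # aio=True returns a grpc.aio channel, aio=False a blocking one; TLS behaviour
        # follows config.tls_config.tls_strategy ("none", "tls" or "mtls") as handled above
        channel = new_conn(config, aio)
        return DispatcherStub(channel)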
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/context/__init__.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/context/__init__.py
new file mode 100644
index 00000000..0cebf2bf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/context/__init__.py
@@ -0,0 +1 @@
+from .context import Context
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/context/context.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/context/context.py
new file mode 100644
index 00000000..2584949b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/context/context.py
@@ -0,0 +1,446 @@
+import inspect
+import json
+import traceback
+from concurrent.futures import Future, ThreadPoolExecutor
+from typing import Any, Generic, Type, TypeVar, cast, overload
+from warnings import warn
+
+from pydantic import BaseModel, StrictStr
+
+from hatchet_sdk.clients.events import EventClient
+from hatchet_sdk.clients.rest.tenacity_utils import tenacity_retry
+from hatchet_sdk.clients.rest_client import RestApi
+from hatchet_sdk.clients.run_event_listener import RunEventListenerClient
+from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener
+from hatchet_sdk.context.worker_context import WorkerContext
+from hatchet_sdk.contracts.dispatcher_pb2 import OverridesData
+from hatchet_sdk.contracts.workflows_pb2 import (
+ BulkTriggerWorkflowRequest,
+ TriggerWorkflowRequest,
+)
+from hatchet_sdk.utils.types import WorkflowValidator
+from hatchet_sdk.utils.typing import is_basemodel_subclass
+from hatchet_sdk.workflow_run import WorkflowRunRef
+
+from ..clients.admin import (
+ AdminClient,
+ ChildTriggerWorkflowOptions,
+ ChildWorkflowRunDict,
+ TriggerWorkflowOptions,
+ WorkflowRunDict,
+)
+from ..clients.dispatcher.dispatcher import ( # type: ignore[attr-defined]
+ Action,
+ DispatcherClient,
+)
+from ..logger import logger
+
+DEFAULT_WORKFLOW_POLLING_INTERVAL = 5 # Seconds
+
+T = TypeVar("T", bound=BaseModel)
+
+
+def get_caller_file_path() -> str:
+ caller_frame = inspect.stack()[2]
+
+ return caller_frame.filename
+
+
+class BaseContext:
+
+ action: Action
+ spawn_index: int
+
+ def _prepare_workflow_options(
+ self,
+ key: str | None = None,
+ options: ChildTriggerWorkflowOptions | None = None,
+ worker_id: str | None = None,
+ ) -> TriggerWorkflowOptions:
+ workflow_run_id = self.action.workflow_run_id
+ step_run_id = self.action.step_run_id
+
+ desired_worker_id = None
+ if options is not None and options.get("sticky") is True:
+ desired_worker_id = worker_id
+
+ meta = None
+ if options is not None and "additional_metadata" in options:
+ meta = options["additional_metadata"]
+
+ ## TODO: Pydantic here to simplify this
+ trigger_options: TriggerWorkflowOptions = {
+ "parent_id": workflow_run_id,
+ "parent_step_run_id": step_run_id,
+ "child_key": key,
+ "child_index": self.spawn_index,
+ "additional_metadata": meta,
+ "desired_worker_id": desired_worker_id,
+ }
+
+ self.spawn_index += 1
+ return trigger_options
+
+
+class ContextAioImpl(BaseContext):
+ def __init__(
+ self,
+ action: Action,
+ dispatcher_client: DispatcherClient,
+ admin_client: AdminClient,
+ event_client: EventClient,
+ rest_client: RestApi,
+ workflow_listener: PooledWorkflowRunListener,
+ workflow_run_event_listener: RunEventListenerClient,
+ worker: WorkerContext,
+ namespace: str = "",
+ ):
+ self.action = action
+ self.dispatcher_client = dispatcher_client
+ self.admin_client = admin_client
+ self.event_client = event_client
+ self.rest_client = rest_client
+ self.workflow_listener = workflow_listener
+ self.workflow_run_event_listener = workflow_run_event_listener
+ self.namespace = namespace
+ self.spawn_index = -1
+ self.worker = worker
+
+ @tenacity_retry
+ async def spawn_workflow(
+ self,
+ workflow_name: str,
+ input: dict[str, Any] = {},
+ key: str | None = None,
+ options: ChildTriggerWorkflowOptions | None = None,
+ ) -> WorkflowRunRef:
+ worker_id = self.worker.id()
+ # if (
+ # options is not None
+ # and "sticky" in options
+ # and options["sticky"] == True
+ # and not self.worker.has_workflow(workflow_name)
+ # ):
+ # raise Exception(
+ # f"cannot run with sticky: workflow {workflow_name} is not registered on the worker"
+ # )
+
+ trigger_options = self._prepare_workflow_options(key, options, worker_id)
+
+ return await self.admin_client.aio.run_workflow(
+ workflow_name, input, trigger_options
+ )
+
+ @tenacity_retry
+ async def spawn_workflows(
+ self, child_workflow_runs: list[ChildWorkflowRunDict]
+ ) -> list[WorkflowRunRef]:
+
+ if len(child_workflow_runs) == 0:
+ raise Exception("no child workflows to spawn")
+
+ worker_id = self.worker.id()
+
+ bulk_trigger_workflow_runs: list[WorkflowRunDict] = []
+ for child_workflow_run in child_workflow_runs:
+ workflow_name = child_workflow_run["workflow_name"]
+ input = child_workflow_run["input"]
+
+ key = child_workflow_run.get("key")
+ options = child_workflow_run.get("options", {})
+
+ trigger_options = self._prepare_workflow_options(key, options, worker_id)
+
+ bulk_trigger_workflow_runs.append(
+ WorkflowRunDict(
+ workflow_name=workflow_name, input=input, options=trigger_options
+ )
+ )
+
+ return await self.admin_client.aio.run_workflows(bulk_trigger_workflow_runs)
+
+
+class Context(BaseContext):
+ spawn_index = -1
+
+ worker: WorkerContext
+
+ def __init__(
+ self,
+ action: Action,
+ dispatcher_client: DispatcherClient,
+ admin_client: AdminClient,
+ event_client: EventClient,
+ rest_client: RestApi,
+ workflow_listener: PooledWorkflowRunListener,
+ workflow_run_event_listener: RunEventListenerClient,
+ worker: WorkerContext,
+ namespace: str = "",
+ validator_registry: dict[str, WorkflowValidator] = {},
+ ):
+ self.worker = worker
+ self.validator_registry = validator_registry
+
+ self.aio = ContextAioImpl(
+ action,
+ dispatcher_client,
+ admin_client,
+ event_client,
+ rest_client,
+ workflow_listener,
+ workflow_run_event_listener,
+ worker,
+ namespace,
+ )
+
+ # Check the type of action.action_payload before attempting to load it as JSON
+ if isinstance(action.action_payload, (str, bytes, bytearray)):
+ try:
+ self.data = cast(dict[str, Any], json.loads(action.action_payload))
+ except Exception as e:
+ logger.error(f"Error parsing action payload: {e}")
+ # Assign an empty dictionary if parsing fails
+ self.data: dict[str, Any] = {} # type: ignore[no-redef]
+ else:
+ # Directly assign the payload to self.data if it's already a dict
+ self.data = (
+ action.action_payload if isinstance(action.action_payload, dict) else {}
+ )
+
+ self.action = action
+
+ # FIXME: stepRunId is a legacy field, we should remove it
+ self.stepRunId = action.step_run_id
+
+ self.step_run_id = action.step_run_id
+ self.exit_flag = False
+ self.dispatcher_client = dispatcher_client
+ self.admin_client = admin_client
+ self.event_client = event_client
+ self.rest_client = rest_client
+ self.workflow_listener = workflow_listener
+ self.workflow_run_event_listener = workflow_run_event_listener
+ self.namespace = namespace
+
+ # FIXME: this limits the number of concurrent log requests to 1, which caps throughput
+ # at roughly 100 log lines per second, depending on network latency.
+ self.logger_thread_pool = ThreadPoolExecutor(max_workers=1)
+ self.stream_event_thread_pool = ThreadPoolExecutor(max_workers=1)
+
+ # store each key in the overrides field in a lookup table
+ # overrides_data is a dictionary of key-value pairs
+ self.overrides_data = self.data.get("overrides", {})
+
+ if action.get_group_key_run_id != "":
+ self.input = self.data
+ else:
+ self.input = self.data.get("input", {})
+
+ def step_output(self, step: str) -> dict[str, Any] | BaseModel:
+ workflow_validator = next(
+ (v for k, v in self.validator_registry.items() if k.split(":")[-1] == step),
+ None,
+ )
+
+ try:
+ parent_step_data = cast(dict[str, Any], self.data["parents"][step])
+ except KeyError:
+ raise ValueError(f"Step output for '{step}' not found")
+
+ if workflow_validator and (v := workflow_validator.step_output):
+ return v.model_validate(parent_step_data)
+
+ return parent_step_data
+
+ def triggered_by_event(self) -> bool:
+ return cast(str, self.data.get("triggered_by", "")) == "event"
+
+ def workflow_input(self) -> dict[str, Any] | T:
+ if (r := self.validator_registry.get(self.action.action_id)) and (
+ i := r.workflow_input
+ ):
+ return cast(
+ T,
+ i.model_validate(self.input),
+ )
+
+ return self.input
+
+ def workflow_run_id(self) -> str:
+ return self.action.workflow_run_id
+
+ def cancel(self) -> None:
+ logger.debug("cancelling step...")
+ self.exit_flag = True
+
+ # done returns true if the context has been cancelled
+ def done(self) -> bool:
+ return self.exit_flag
+
+ def playground(self, name: str, default: str | None = None) -> str | None:
+ # if the key exists in the overrides_data field, return the value
+ if name in self.overrides_data:
+ warn(
+ "Use of `overrides_data` is deprecated.",
+ DeprecationWarning,
+ stacklevel=1,
+ )
+ return str(self.overrides_data[name])
+
+ caller_file = get_caller_file_path()
+
+ self.dispatcher_client.put_overrides_data(
+ OverridesData(
+ stepRunId=self.stepRunId,
+ path=name,
+ value=json.dumps(default),
+ callerFilename=caller_file,
+ )
+ )
+
+ return default
+
+ def _log(self, line: str) -> tuple[bool, Exception | None]:
+ try:
+ self.event_client.log(message=line, step_run_id=self.stepRunId)
+ return True, None
+ except Exception as e:
+ # we don't want to raise an exception here, as it will kill the log thread
+ return False, e
+
+ def log(self, line: Any, raise_on_error: bool = False) -> None:
+ if self.stepRunId == "":
+ return
+
+ if not isinstance(line, str):
+ try:
+ line = json.dumps(line)
+ except Exception:
+ line = str(line)
+
+ future = self.logger_thread_pool.submit(self._log, line)
+
+ def handle_result(future: Future[tuple[bool, Exception | None]]) -> None:
+ success, exception = future.result()
+ if not success and exception:
+ if raise_on_error:
+ raise exception
+ else:
+ thread_trace = "".join(
+ traceback.format_exception(
+ type(exception), exception, exception.__traceback__
+ )
+ )
+ call_site_trace = "".join(traceback.format_stack())
+ logger.error(
+ f"Error in log thread: {exception}\n{thread_trace}\nCalled from:\n{call_site_trace}"
+ )
+
+ future.add_done_callback(handle_result)
+
+ def release_slot(self) -> None:
+ return self.dispatcher_client.release_slot(self.stepRunId)
+
+ def _put_stream(self, data: str | bytes) -> None:
+ try:
+ self.event_client.stream(data=data, step_run_id=self.stepRunId)
+ except Exception as e:
+ logger.error(f"Error putting stream event: {e}")
+
+ def put_stream(self, data: str | bytes) -> None:
+ if self.stepRunId == "":
+ return
+
+ self.stream_event_thread_pool.submit(self._put_stream, data)
+
+ def refresh_timeout(self, increment_by: str) -> None:
+ try:
+ return self.dispatcher_client.refresh_timeout(
+ step_run_id=self.stepRunId, increment_by=increment_by
+ )
+ except Exception as e:
+ logger.error(f"Error refreshing timeout: {e}")
+
+ def retry_count(self) -> int:
+ return self.action.retry_count
+
+ def additional_metadata(self) -> dict[str, Any] | None:
+ return self.action.additional_metadata
+
+ def child_index(self) -> int | None:
+ return self.action.child_workflow_index
+
+ def child_key(self) -> str | None:
+ return self.action.child_workflow_key
+
+ def parent_workflow_run_id(self) -> str | None:
+ return self.action.parent_workflow_run_id
+
+ def step_run_errors(self) -> dict[str, str]:
+ errors = cast(dict[str, str], self.data.get("step_run_errors", {}))
+
+ if not errors:
+ logger.error(
+ "No step run errors found. `context.step_run_errors` is intended to be run in an on-failure step, and will only work on engine versions more recent than v0.53.10"
+ )
+
+ return errors
+
+ def fetch_run_failures(self) -> list[dict[str, StrictStr]]:
+ data = self.rest_client.workflow_run_get(self.action.workflow_run_id)
+ other_job_runs = [
+ run for run in (data.job_runs or []) if run.job_id != self.action.job_id
+ ]
+ # TODO: Parse Step Runs using a Pydantic Model rather than a hand crafted dictionary
+ return [
+ {
+ "step_id": step_run.step_id,
+ "step_run_action_name": step_run.step.action,
+ "error": step_run.error,
+ }
+ for job_run in other_job_runs
+ if job_run.step_runs
+ for step_run in job_run.step_runs
+ if step_run.error and step_run.step
+ ]
+
+ @tenacity_retry
+ def spawn_workflow(
+ self,
+ workflow_name: str,
+ input: dict[str, Any] = {},
+ key: str | None = None,
+ options: ChildTriggerWorkflowOptions | None = None,
+ ) -> WorkflowRunRef:
+ worker_id = self.worker.id()
+ trigger_options = self._prepare_workflow_options(key, options, worker_id)
+
+ return self.admin_client.run_workflow(workflow_name, input, trigger_options)
+
+ @tenacity_retry
+ def spawn_workflows(
+ self, child_workflow_runs: list[ChildWorkflowRunDict]
+ ) -> list[WorkflowRunRef]:
+
+ if len(child_workflow_runs) == 0:
+ raise Exception("no child workflows to spawn")
+
+ worker_id = self.worker.id()
+
+ bulk_trigger_workflow_runs: list[WorkflowRunDict] = []
+ for child_workflow_run in child_workflow_runs:
+ workflow_name = child_workflow_run["workflow_name"]
+ input = child_workflow_run["input"]
+
+ key = child_workflow_run.get("key")
+ options = child_workflow_run.get("options", {})
+
+ trigger_options = self._prepare_workflow_options(key, options, worker_id)
+
+ bulk_trigger_workflow_runs.append(
+ WorkflowRunDict(
+ workflow_name=workflow_name, input=input, options=trigger_options
+ )
+ )
+
+ return self.admin_client.run_workflows(bulk_trigger_workflow_runs)
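
A hedged sketch of how the synchronous Context above is typically used inside a step body; the function name and the "child-workflow" name are illustrative only, and the step-registration decorator is omitted because it lives outside this diff.

    from hatchet_sdk.context import Context


    def fan_out_step(ctx: Context) -> dict:
        # workflow_input() returns the run input, validated into a pydantic model when
        # one is registered for this action in the validator registry
        payload = ctx.workflow_input()

        # log() hands the line to the event client on a single background worker thread
        ctx.log(f"spawning a child for run {ctx.workflow_run_id()}")

        # spawn_workflow() builds the parent/child trigger options via
        # _prepare_workflow_options and delegates to the admin client;
        # "child-workflow" is a hypothetical workflow name
        ctx.spawn_workflow("child-workflow", {"parent_input": payload}, key="child-0")

        return {"spawned": True}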
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/context/worker_context.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/context/worker_context.py
new file mode 100644
index 00000000..770ae097
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/context/worker_context.py
@@ -0,0 +1,28 @@
+from hatchet_sdk.clients.dispatcher.dispatcher import DispatcherClient
+
+
+class WorkerContext:
+ _worker_id: str | None = None
+ _registered_workflow_names: list[str] = []
+ _labels: dict[str, str | int] = {}
+
+ def __init__(self, labels: dict[str, str | int], client: DispatcherClient):
+ self._labels = labels
+ self.client = client
+
+ def labels(self) -> dict[str, str | int]:
+ return self._labels
+
+ def upsert_labels(self, labels: dict[str, str | int]) -> None:
+ self.client.upsert_worker_labels(self._worker_id, labels)
+ self._labels.update(labels)
+
+ async def async_upsert_labels(self, labels: dict[str, str | int]) -> None:
+ await self.client.async_upsert_worker_labels(self._worker_id, labels)
+ self._labels.update(labels)
+
+ def id(self) -> str | None:
+ return self._worker_id
+
+ # def has_workflow(self, workflow_name: str):
+ # return workflow_name in self._registered_workflow_names
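
A short hedged example of the worker context above, reached from a step through ctx.worker; the label key and value are illustrative.

    from hatchet_sdk.context import Context


    def update_worker_labels(ctx: Context) -> dict:
        worker = ctx.worker

        # upsert_labels() pushes the new labels through the dispatcher client and then
        # mirrors them into the local cache returned by labels()
        worker.upsert_labels({"model": "fast-gpu"})

        return {"worker_id": worker.id(), "labels": worker.labels()}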
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/dispatcher_pb2.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/dispatcher_pb2.py
new file mode 100644
index 00000000..5a939ebd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/dispatcher_pb2.py
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: dispatcher.proto
+# Protobuf Python Version: 5.26.1
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x64ispatcher.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"V\n\x0cWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValue\"\xc8\x01\n\x0bRuntimeInfo\x12\x17\n\nsdkVersion\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x1c\n\x08language\x18\x02 \x01(\x0e\x32\x05.SDKSH\x01\x88\x01\x01\x12\x1c\n\x0flanguageVersion\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x0f\n\x02os\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x12\n\x05\x65xtra\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\r\n\x0b_sdkVersionB\x0b\n\t_languageB\x12\n\x10_languageVersionB\x05\n\x03_osB\x08\n\x06_extra\"\xc0\x02\n\x15WorkerRegisterRequest\x12\x12\n\nworkerName\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63tions\x18\x02 \x03(\t\x12\x10\n\x08services\x18\x03 \x03(\t\x12\x14\n\x07maxRuns\x18\x04 \x01(\x05H\x00\x88\x01\x01\x12\x32\n\x06labels\x18\x05 \x03(\x0b\x32\".WorkerRegisterRequest.LabelsEntry\x12\x16\n\twebhookId\x18\x06 \x01(\tH\x01\x88\x01\x01\x12&\n\x0bruntimeInfo\x18\x07 \x01(\x0b\x32\x0c.RuntimeInfoH\x02\x88\x01\x01\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\x42\n\n\x08_maxRunsB\x0c\n\n_webhookIdB\x0e\n\x0c_runtimeInfo\"P\n\x16WorkerRegisterResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\x12\x12\n\nworkerName\x18\x03 \x01(\t\"\xa3\x01\n\x19UpsertWorkerLabelsRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\x36\n\x06labels\x18\x02 \x03(\x0b\x32&.UpsertWorkerLabelsRequest.LabelsEntry\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\"@\n\x1aUpsertWorkerLabelsResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\x86\x04\n\x0e\x41ssignedAction\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\r\n\x05jobId\x18\x04 \x01(\t\x12\x0f\n\x07jobName\x18\x05 \x01(\t\x12\x10\n\x08jobRunId\x18\x06 \x01(\t\x12\x0e\n\x06stepId\x18\x07 \x01(\t\x12\x11\n\tstepRunId\x18\x08 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\t \x01(\t\x12\x1f\n\nactionType\x18\n \x01(\x0e\x32\x0b.ActionType\x12\x15\n\ractionPayload\x18\x0b \x01(\t\x12\x10\n\x08stepName\x18\x0c \x01(\t\x12\x12\n\nretryCount\x18\r \x01(\x05\x12 \n\x13\x61\x64\x64itional_metadata\x18\x0e \x01(\tH\x00\x88\x01\x01\x12!\n\x14\x63hild_workflow_index\x18\x0f \x01(\x05H\x01\x88\x01\x01\x12\x1f\n\x12\x63hild_workflow_key\x18\x10 \x01(\tH\x02\x88\x01\x01\x12#\n\x16parent_workflow_run_id\x18\x11 \x01(\tH\x03\x88\x01\x01\x42\x16\n\x14_additional_metadataB\x17\n\x15_child_workflow_indexB\x15\n\x13_child_workflow_keyB\x19\n\x17_parent_workflow_run_id\"\'\n\x13WorkerListenRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\",\n\x18WorkerUnsubscribeRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\"?\n\x19WorkerUnsubscribeResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\xe1\x01\n\x13GroupKeyActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\teventType\x18\x06 \x01(\x0e\x32\x18.GroupKeyActionEventType\x12\x14\n\x0c\x65ventPayload\x18\x07 \x01(\t\"\x94\x02\n\x0fStepActionEvent\x12\x10\n\x08workerId\x18\x01 
\x01(\t\x12\r\n\x05jobId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x0e\n\x06stepId\x18\x04 \x01(\t\x12\x11\n\tstepRunId\x18\x05 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x06 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\teventType\x18\x08 \x01(\x0e\x32\x14.StepActionEventType\x12\x14\n\x0c\x65ventPayload\x18\t \x01(\t\x12\x17\n\nretryCount\x18\n \x01(\x05H\x00\x88\x01\x01\x42\r\n\x0b_retryCount\"9\n\x13\x41\x63tionEventResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\xc0\x01\n SubscribeToWorkflowEventsRequest\x12\x1a\n\rworkflowRunId\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x1e\n\x11\x61\x64\x64itionalMetaKey\x18\x02 \x01(\tH\x01\x88\x01\x01\x12 \n\x13\x61\x64\x64itionalMetaValue\x18\x03 \x01(\tH\x02\x88\x01\x01\x42\x10\n\x0e_workflowRunIdB\x14\n\x12_additionalMetaKeyB\x16\n\x14_additionalMetaValue\"7\n\x1eSubscribeToWorkflowRunsRequest\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\"\xb2\x02\n\rWorkflowEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12#\n\x0cresourceType\x18\x02 \x01(\x0e\x32\r.ResourceType\x12%\n\teventType\x18\x03 \x01(\x0e\x32\x12.ResourceEventType\x12\x12\n\nresourceId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0c\x65ventPayload\x18\x06 \x01(\t\x12\x0e\n\x06hangup\x18\x07 \x01(\x08\x12\x18\n\x0bstepRetries\x18\x08 \x01(\x05H\x00\x88\x01\x01\x12\x17\n\nretryCount\x18\t \x01(\x05H\x01\x88\x01\x01\x42\x0e\n\x0c_stepRetriesB\r\n\x0b_retryCount\"\xa8\x01\n\x10WorkflowRunEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12(\n\teventType\x18\x02 \x01(\x0e\x32\x15.WorkflowRunEventType\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x07results\x18\x04 \x03(\x0b\x32\x0e.StepRunResult\"\x8a\x01\n\rStepRunResult\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x16\n\x0estepReadableId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x12\n\x05\x65rror\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06output\x18\x05 \x01(\tH\x01\x88\x01\x01\x42\x08\n\x06_errorB\t\n\x07_output\"W\n\rOverridesData\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\x16\n\x0e\x63\x61llerFilename\x18\x04 \x01(\t\"\x17\n\x15OverridesDataResponse\"U\n\x10HeartbeatRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12/\n\x0bheartbeatAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x13\n\x11HeartbeatResponse\"F\n\x15RefreshTimeoutRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x1a\n\x12incrementTimeoutBy\x18\x02 \x01(\t\"G\n\x16RefreshTimeoutResponse\x12-\n\ttimeoutAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\'\n\x12ReleaseSlotRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\"\x15\n\x13ReleaseSlotResponse*7\n\x04SDKS\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02GO\x10\x01\x12\n\n\x06PYTHON\x10\x02\x12\x0e\n\nTYPESCRIPT\x10\x03*N\n\nActionType\x12\x12\n\x0eSTART_STEP_RUN\x10\x00\x12\x13\n\x0f\x43\x41NCEL_STEP_RUN\x10\x01\x12\x17\n\x13START_GET_GROUP_KEY\x10\x02*\xa2\x01\n\x17GroupKeyActionEventType\x12 \n\x1cGROUP_KEY_EVENT_TYPE_UNKNOWN\x10\x00\x12 \n\x1cGROUP_KEY_EVENT_TYPE_STARTED\x10\x01\x12\"\n\x1eGROUP_KEY_EVENT_TYPE_COMPLETED\x10\x02\x12\x1f\n\x1bGROUP_KEY_EVENT_TYPE_FAILED\x10\x03*\xac\x01\n\x13StepActionEventType\x12\x1b\n\x17STEP_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1b\n\x17STEP_EVENT_TYPE_STARTED\x10\x01\x12\x1d\n\x19STEP_EVENT_TYPE_COMPLETED\x10\x02\x12\x1a\n\x16STEP_EVENT_TYPE_FAILED\x10\x03\x12 
\n\x1cSTEP_EVENT_TYPE_ACKNOWLEDGED\x10\x04*e\n\x0cResourceType\x12\x19\n\x15RESOURCE_TYPE_UNKNOWN\x10\x00\x12\x1a\n\x16RESOURCE_TYPE_STEP_RUN\x10\x01\x12\x1e\n\x1aRESOURCE_TYPE_WORKFLOW_RUN\x10\x02*\xfe\x01\n\x11ResourceEventType\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_STARTED\x10\x01\x12!\n\x1dRESOURCE_EVENT_TYPE_COMPLETED\x10\x02\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_FAILED\x10\x03\x12!\n\x1dRESOURCE_EVENT_TYPE_CANCELLED\x10\x04\x12!\n\x1dRESOURCE_EVENT_TYPE_TIMED_OUT\x10\x05\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_STREAM\x10\x06*<\n\x14WorkflowRunEventType\x12$\n WORKFLOW_RUN_EVENT_TYPE_FINISHED\x10\x00\x32\xf8\x06\n\nDispatcher\x12=\n\x08Register\x12\x16.WorkerRegisterRequest\x1a\x17.WorkerRegisterResponse\"\x00\x12\x33\n\x06Listen\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x35\n\x08ListenV2\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x34\n\tHeartbeat\x12\x11.HeartbeatRequest\x1a\x12.HeartbeatResponse\"\x00\x12R\n\x19SubscribeToWorkflowEvents\x12!.SubscribeToWorkflowEventsRequest\x1a\x0e.WorkflowEvent\"\x00\x30\x01\x12S\n\x17SubscribeToWorkflowRuns\x12\x1f.SubscribeToWorkflowRunsRequest\x1a\x11.WorkflowRunEvent\"\x00(\x01\x30\x01\x12?\n\x13SendStepActionEvent\x12\x10.StepActionEvent\x1a\x14.ActionEventResponse\"\x00\x12G\n\x17SendGroupKeyActionEvent\x12\x14.GroupKeyActionEvent\x1a\x14.ActionEventResponse\"\x00\x12<\n\x10PutOverridesData\x12\x0e.OverridesData\x1a\x16.OverridesDataResponse\"\x00\x12\x46\n\x0bUnsubscribe\x12\x19.WorkerUnsubscribeRequest\x1a\x1a.WorkerUnsubscribeResponse\"\x00\x12\x43\n\x0eRefreshTimeout\x12\x16.RefreshTimeoutRequest\x1a\x17.RefreshTimeoutResponse\"\x00\x12:\n\x0bReleaseSlot\x12\x13.ReleaseSlotRequest\x1a\x14.ReleaseSlotResponse\"\x00\x12O\n\x12UpsertWorkerLabels\x12\x1a.UpsertWorkerLabelsRequest\x1a\x1b.UpsertWorkerLabelsResponse\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'dispatcher_pb2', _globals)
+if not _descriptor._USE_C_DESCRIPTORS:
+ _globals['DESCRIPTOR']._loaded_options = None
+ _globals['DESCRIPTOR']._serialized_options = b'ZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contracts'
+ _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._loaded_options = None
+ _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._serialized_options = b'8\001'
+ _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._loaded_options = None
+ _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._serialized_options = b'8\001'
+ _globals['_SDKS']._serialized_start=3524
+ _globals['_SDKS']._serialized_end=3579
+ _globals['_ACTIONTYPE']._serialized_start=3581
+ _globals['_ACTIONTYPE']._serialized_end=3659
+ _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_start=3662
+ _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_end=3824
+ _globals['_STEPACTIONEVENTTYPE']._serialized_start=3827
+ _globals['_STEPACTIONEVENTTYPE']._serialized_end=3999
+ _globals['_RESOURCETYPE']._serialized_start=4001
+ _globals['_RESOURCETYPE']._serialized_end=4102
+ _globals['_RESOURCEEVENTTYPE']._serialized_start=4105
+ _globals['_RESOURCEEVENTTYPE']._serialized_end=4359
+ _globals['_WORKFLOWRUNEVENTTYPE']._serialized_start=4361
+ _globals['_WORKFLOWRUNEVENTTYPE']._serialized_end=4421
+ _globals['_WORKERLABELS']._serialized_start=53
+ _globals['_WORKERLABELS']._serialized_end=139
+ _globals['_RUNTIMEINFO']._serialized_start=142
+ _globals['_RUNTIMEINFO']._serialized_end=342
+ _globals['_WORKERREGISTERREQUEST']._serialized_start=345
+ _globals['_WORKERREGISTERREQUEST']._serialized_end=665
+ _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._serialized_start=563
+ _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._serialized_end=623
+ _globals['_WORKERREGISTERRESPONSE']._serialized_start=667
+ _globals['_WORKERREGISTERRESPONSE']._serialized_end=747
+ _globals['_UPSERTWORKERLABELSREQUEST']._serialized_start=750
+ _globals['_UPSERTWORKERLABELSREQUEST']._serialized_end=913
+ _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._serialized_start=563
+ _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._serialized_end=623
+ _globals['_UPSERTWORKERLABELSRESPONSE']._serialized_start=915
+ _globals['_UPSERTWORKERLABELSRESPONSE']._serialized_end=979
+ _globals['_ASSIGNEDACTION']._serialized_start=982
+ _globals['_ASSIGNEDACTION']._serialized_end=1500
+ _globals['_WORKERLISTENREQUEST']._serialized_start=1502
+ _globals['_WORKERLISTENREQUEST']._serialized_end=1541
+ _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_start=1543
+ _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_end=1587
+ _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_start=1589
+ _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_end=1652
+ _globals['_GROUPKEYACTIONEVENT']._serialized_start=1655
+ _globals['_GROUPKEYACTIONEVENT']._serialized_end=1880
+ _globals['_STEPACTIONEVENT']._serialized_start=1883
+ _globals['_STEPACTIONEVENT']._serialized_end=2159
+ _globals['_ACTIONEVENTRESPONSE']._serialized_start=2161
+ _globals['_ACTIONEVENTRESPONSE']._serialized_end=2218
+ _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_start=2221
+ _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_end=2413
+ _globals['_SUBSCRIBETOWORKFLOWRUNSREQUEST']._serialized_start=2415
+ _globals['_SUBSCRIBETOWORKFLOWRUNSREQUEST']._serialized_end=2470
+ _globals['_WORKFLOWEVENT']._serialized_start=2473
+ _globals['_WORKFLOWEVENT']._serialized_end=2779
+ _globals['_WORKFLOWRUNEVENT']._serialized_start=2782
+ _globals['_WORKFLOWRUNEVENT']._serialized_end=2950
+ _globals['_STEPRUNRESULT']._serialized_start=2953
+ _globals['_STEPRUNRESULT']._serialized_end=3091
+ _globals['_OVERRIDESDATA']._serialized_start=3093
+ _globals['_OVERRIDESDATA']._serialized_end=3180
+ _globals['_OVERRIDESDATARESPONSE']._serialized_start=3182
+ _globals['_OVERRIDESDATARESPONSE']._serialized_end=3205
+ _globals['_HEARTBEATREQUEST']._serialized_start=3207
+ _globals['_HEARTBEATREQUEST']._serialized_end=3292
+ _globals['_HEARTBEATRESPONSE']._serialized_start=3294
+ _globals['_HEARTBEATRESPONSE']._serialized_end=3313
+ _globals['_REFRESHTIMEOUTREQUEST']._serialized_start=3315
+ _globals['_REFRESHTIMEOUTREQUEST']._serialized_end=3385
+ _globals['_REFRESHTIMEOUTRESPONSE']._serialized_start=3387
+ _globals['_REFRESHTIMEOUTRESPONSE']._serialized_end=3458
+ _globals['_RELEASESLOTREQUEST']._serialized_start=3460
+ _globals['_RELEASESLOTREQUEST']._serialized_end=3499
+ _globals['_RELEASESLOTRESPONSE']._serialized_start=3501
+ _globals['_RELEASESLOTRESPONSE']._serialized_end=3522
+ _globals['_DISPATCHER']._serialized_start=4424
+ _globals['_DISPATCHER']._serialized_end=5312
+# @@protoc_insertion_point(module_scope)
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/dispatcher_pb2.pyi b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/dispatcher_pb2.pyi
new file mode 100644
index 00000000..c5c82f50
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/dispatcher_pb2.pyi
@@ -0,0 +1,387 @@
+from google.protobuf import timestamp_pb2 as _timestamp_pb2
+from google.protobuf.internal import containers as _containers
+from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union
+
+DESCRIPTOR: _descriptor.FileDescriptor
+
+class SDKS(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+ __slots__ = ()
+ UNKNOWN: _ClassVar[SDKS]
+ GO: _ClassVar[SDKS]
+ PYTHON: _ClassVar[SDKS]
+ TYPESCRIPT: _ClassVar[SDKS]
+
+class ActionType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+ __slots__ = ()
+ START_STEP_RUN: _ClassVar[ActionType]
+ CANCEL_STEP_RUN: _ClassVar[ActionType]
+ START_GET_GROUP_KEY: _ClassVar[ActionType]
+
+class GroupKeyActionEventType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+ __slots__ = ()
+ GROUP_KEY_EVENT_TYPE_UNKNOWN: _ClassVar[GroupKeyActionEventType]
+ GROUP_KEY_EVENT_TYPE_STARTED: _ClassVar[GroupKeyActionEventType]
+ GROUP_KEY_EVENT_TYPE_COMPLETED: _ClassVar[GroupKeyActionEventType]
+ GROUP_KEY_EVENT_TYPE_FAILED: _ClassVar[GroupKeyActionEventType]
+
+class StepActionEventType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+ __slots__ = ()
+ STEP_EVENT_TYPE_UNKNOWN: _ClassVar[StepActionEventType]
+ STEP_EVENT_TYPE_STARTED: _ClassVar[StepActionEventType]
+ STEP_EVENT_TYPE_COMPLETED: _ClassVar[StepActionEventType]
+ STEP_EVENT_TYPE_FAILED: _ClassVar[StepActionEventType]
+ STEP_EVENT_TYPE_ACKNOWLEDGED: _ClassVar[StepActionEventType]
+
+class ResourceType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+ __slots__ = ()
+ RESOURCE_TYPE_UNKNOWN: _ClassVar[ResourceType]
+ RESOURCE_TYPE_STEP_RUN: _ClassVar[ResourceType]
+ RESOURCE_TYPE_WORKFLOW_RUN: _ClassVar[ResourceType]
+
+class ResourceEventType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+ __slots__ = ()
+ RESOURCE_EVENT_TYPE_UNKNOWN: _ClassVar[ResourceEventType]
+ RESOURCE_EVENT_TYPE_STARTED: _ClassVar[ResourceEventType]
+ RESOURCE_EVENT_TYPE_COMPLETED: _ClassVar[ResourceEventType]
+ RESOURCE_EVENT_TYPE_FAILED: _ClassVar[ResourceEventType]
+ RESOURCE_EVENT_TYPE_CANCELLED: _ClassVar[ResourceEventType]
+ RESOURCE_EVENT_TYPE_TIMED_OUT: _ClassVar[ResourceEventType]
+ RESOURCE_EVENT_TYPE_STREAM: _ClassVar[ResourceEventType]
+
+class WorkflowRunEventType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+ __slots__ = ()
+ WORKFLOW_RUN_EVENT_TYPE_FINISHED: _ClassVar[WorkflowRunEventType]
+UNKNOWN: SDKS
+GO: SDKS
+PYTHON: SDKS
+TYPESCRIPT: SDKS
+START_STEP_RUN: ActionType
+CANCEL_STEP_RUN: ActionType
+START_GET_GROUP_KEY: ActionType
+GROUP_KEY_EVENT_TYPE_UNKNOWN: GroupKeyActionEventType
+GROUP_KEY_EVENT_TYPE_STARTED: GroupKeyActionEventType
+GROUP_KEY_EVENT_TYPE_COMPLETED: GroupKeyActionEventType
+GROUP_KEY_EVENT_TYPE_FAILED: GroupKeyActionEventType
+STEP_EVENT_TYPE_UNKNOWN: StepActionEventType
+STEP_EVENT_TYPE_STARTED: StepActionEventType
+STEP_EVENT_TYPE_COMPLETED: StepActionEventType
+STEP_EVENT_TYPE_FAILED: StepActionEventType
+STEP_EVENT_TYPE_ACKNOWLEDGED: StepActionEventType
+RESOURCE_TYPE_UNKNOWN: ResourceType
+RESOURCE_TYPE_STEP_RUN: ResourceType
+RESOURCE_TYPE_WORKFLOW_RUN: ResourceType
+RESOURCE_EVENT_TYPE_UNKNOWN: ResourceEventType
+RESOURCE_EVENT_TYPE_STARTED: ResourceEventType
+RESOURCE_EVENT_TYPE_COMPLETED: ResourceEventType
+RESOURCE_EVENT_TYPE_FAILED: ResourceEventType
+RESOURCE_EVENT_TYPE_CANCELLED: ResourceEventType
+RESOURCE_EVENT_TYPE_TIMED_OUT: ResourceEventType
+RESOURCE_EVENT_TYPE_STREAM: ResourceEventType
+WORKFLOW_RUN_EVENT_TYPE_FINISHED: WorkflowRunEventType
+
+class WorkerLabels(_message.Message):
+ __slots__ = ("strValue", "intValue")
+ STRVALUE_FIELD_NUMBER: _ClassVar[int]
+ INTVALUE_FIELD_NUMBER: _ClassVar[int]
+ strValue: str
+ intValue: int
+ def __init__(self, strValue: _Optional[str] = ..., intValue: _Optional[int] = ...) -> None: ...
+
+class RuntimeInfo(_message.Message):
+ __slots__ = ("sdkVersion", "language", "languageVersion", "os", "extra")
+ SDKVERSION_FIELD_NUMBER: _ClassVar[int]
+ LANGUAGE_FIELD_NUMBER: _ClassVar[int]
+ LANGUAGEVERSION_FIELD_NUMBER: _ClassVar[int]
+ OS_FIELD_NUMBER: _ClassVar[int]
+ EXTRA_FIELD_NUMBER: _ClassVar[int]
+ sdkVersion: str
+ language: SDKS
+ languageVersion: str
+ os: str
+ extra: str
+ def __init__(self, sdkVersion: _Optional[str] = ..., language: _Optional[_Union[SDKS, str]] = ..., languageVersion: _Optional[str] = ..., os: _Optional[str] = ..., extra: _Optional[str] = ...) -> None: ...
+
+class WorkerRegisterRequest(_message.Message):
+ __slots__ = ("workerName", "actions", "services", "maxRuns", "labels", "webhookId", "runtimeInfo")
+ class LabelsEntry(_message.Message):
+ __slots__ = ("key", "value")
+ KEY_FIELD_NUMBER: _ClassVar[int]
+ VALUE_FIELD_NUMBER: _ClassVar[int]
+ key: str
+ value: WorkerLabels
+ def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[WorkerLabels, _Mapping]] = ...) -> None: ...
+ WORKERNAME_FIELD_NUMBER: _ClassVar[int]
+ ACTIONS_FIELD_NUMBER: _ClassVar[int]
+ SERVICES_FIELD_NUMBER: _ClassVar[int]
+ MAXRUNS_FIELD_NUMBER: _ClassVar[int]
+ LABELS_FIELD_NUMBER: _ClassVar[int]
+ WEBHOOKID_FIELD_NUMBER: _ClassVar[int]
+ RUNTIMEINFO_FIELD_NUMBER: _ClassVar[int]
+ workerName: str
+ actions: _containers.RepeatedScalarFieldContainer[str]
+ services: _containers.RepeatedScalarFieldContainer[str]
+ maxRuns: int
+ labels: _containers.MessageMap[str, WorkerLabels]
+ webhookId: str
+ runtimeInfo: RuntimeInfo
+ def __init__(self, workerName: _Optional[str] = ..., actions: _Optional[_Iterable[str]] = ..., services: _Optional[_Iterable[str]] = ..., maxRuns: _Optional[int] = ..., labels: _Optional[_Mapping[str, WorkerLabels]] = ..., webhookId: _Optional[str] = ..., runtimeInfo: _Optional[_Union[RuntimeInfo, _Mapping]] = ...) -> None: ...
+
+class WorkerRegisterResponse(_message.Message):
+ __slots__ = ("tenantId", "workerId", "workerName")
+ TENANTID_FIELD_NUMBER: _ClassVar[int]
+ WORKERID_FIELD_NUMBER: _ClassVar[int]
+ WORKERNAME_FIELD_NUMBER: _ClassVar[int]
+ tenantId: str
+ workerId: str
+ workerName: str
+ def __init__(self, tenantId: _Optional[str] = ..., workerId: _Optional[str] = ..., workerName: _Optional[str] = ...) -> None: ...
+
+class UpsertWorkerLabelsRequest(_message.Message):
+ __slots__ = ("workerId", "labels")
+ class LabelsEntry(_message.Message):
+ __slots__ = ("key", "value")
+ KEY_FIELD_NUMBER: _ClassVar[int]
+ VALUE_FIELD_NUMBER: _ClassVar[int]
+ key: str
+ value: WorkerLabels
+ def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[WorkerLabels, _Mapping]] = ...) -> None: ...
+ WORKERID_FIELD_NUMBER: _ClassVar[int]
+ LABELS_FIELD_NUMBER: _ClassVar[int]
+ workerId: str
+ labels: _containers.MessageMap[str, WorkerLabels]
+ def __init__(self, workerId: _Optional[str] = ..., labels: _Optional[_Mapping[str, WorkerLabels]] = ...) -> None: ...
+
+class UpsertWorkerLabelsResponse(_message.Message):
+ __slots__ = ("tenantId", "workerId")
+ TENANTID_FIELD_NUMBER: _ClassVar[int]
+ WORKERID_FIELD_NUMBER: _ClassVar[int]
+ tenantId: str
+ workerId: str
+ def __init__(self, tenantId: _Optional[str] = ..., workerId: _Optional[str] = ...) -> None: ...
+
+class AssignedAction(_message.Message):
+ __slots__ = ("tenantId", "workflowRunId", "getGroupKeyRunId", "jobId", "jobName", "jobRunId", "stepId", "stepRunId", "actionId", "actionType", "actionPayload", "stepName", "retryCount", "additional_metadata", "child_workflow_index", "child_workflow_key", "parent_workflow_run_id")
+ TENANTID_FIELD_NUMBER: _ClassVar[int]
+ WORKFLOWRUNID_FIELD_NUMBER: _ClassVar[int]
+ GETGROUPKEYRUNID_FIELD_NUMBER: _ClassVar[int]
+ JOBID_FIELD_NUMBER: _ClassVar[int]
+ JOBNAME_FIELD_NUMBER: _ClassVar[int]
+ JOBRUNID_FIELD_NUMBER: _ClassVar[int]
+ STEPID_FIELD_NUMBER: _ClassVar[int]
+ STEPRUNID_FIELD_NUMBER: _ClassVar[int]
+ ACTIONID_FIELD_NUMBER: _ClassVar[int]
+ ACTIONTYPE_FIELD_NUMBER: _ClassVar[int]
+ ACTIONPAYLOAD_FIELD_NUMBER: _ClassVar[int]
+ STEPNAME_FIELD_NUMBER: _ClassVar[int]
+ RETRYCOUNT_FIELD_NUMBER: _ClassVar[int]
+ ADDITIONAL_METADATA_FIELD_NUMBER: _ClassVar[int]
+ CHILD_WORKFLOW_INDEX_FIELD_NUMBER: _ClassVar[int]
+ CHILD_WORKFLOW_KEY_FIELD_NUMBER: _ClassVar[int]
+ PARENT_WORKFLOW_RUN_ID_FIELD_NUMBER: _ClassVar[int]
+ tenantId: str
+ workflowRunId: str
+ getGroupKeyRunId: str
+ jobId: str
+ jobName: str
+ jobRunId: str
+ stepId: str
+ stepRunId: str
+ actionId: str
+ actionType: ActionType
+ actionPayload: str
+ stepName: str
+ retryCount: int
+ additional_metadata: str
+ child_workflow_index: int
+ child_workflow_key: str
+ parent_workflow_run_id: str
+ def __init__(self, tenantId: _Optional[str] = ..., workflowRunId: _Optional[str] = ..., getGroupKeyRunId: _Optional[str] = ..., jobId: _Optional[str] = ..., jobName: _Optional[str] = ..., jobRunId: _Optional[str] = ..., stepId: _Optional[str] = ..., stepRunId: _Optional[str] = ..., actionId: _Optional[str] = ..., actionType: _Optional[_Union[ActionType, str]] = ..., actionPayload: _Optional[str] = ..., stepName: _Optional[str] = ..., retryCount: _Optional[int] = ..., additional_metadata: _Optional[str] = ..., child_workflow_index: _Optional[int] = ..., child_workflow_key: _Optional[str] = ..., parent_workflow_run_id: _Optional[str] = ...) -> None: ...
+
+class WorkerListenRequest(_message.Message):
+ __slots__ = ("workerId",)
+ WORKERID_FIELD_NUMBER: _ClassVar[int]
+ workerId: str
+ def __init__(self, workerId: _Optional[str] = ...) -> None: ...
+
+class WorkerUnsubscribeRequest(_message.Message):
+ __slots__ = ("workerId",)
+ WORKERID_FIELD_NUMBER: _ClassVar[int]
+ workerId: str
+ def __init__(self, workerId: _Optional[str] = ...) -> None: ...
+
+class WorkerUnsubscribeResponse(_message.Message):
+ __slots__ = ("tenantId", "workerId")
+ TENANTID_FIELD_NUMBER: _ClassVar[int]
+ WORKERID_FIELD_NUMBER: _ClassVar[int]
+ tenantId: str
+ workerId: str
+ def __init__(self, tenantId: _Optional[str] = ..., workerId: _Optional[str] = ...) -> None: ...
+
+class GroupKeyActionEvent(_message.Message):
+ __slots__ = ("workerId", "workflowRunId", "getGroupKeyRunId", "actionId", "eventTimestamp", "eventType", "eventPayload")
+ WORKERID_FIELD_NUMBER: _ClassVar[int]
+ WORKFLOWRUNID_FIELD_NUMBER: _ClassVar[int]
+ GETGROUPKEYRUNID_FIELD_NUMBER: _ClassVar[int]
+ ACTIONID_FIELD_NUMBER: _ClassVar[int]
+ EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int]
+ EVENTTYPE_FIELD_NUMBER: _ClassVar[int]
+ EVENTPAYLOAD_FIELD_NUMBER: _ClassVar[int]
+ workerId: str
+ workflowRunId: str
+ getGroupKeyRunId: str
+ actionId: str
+ eventTimestamp: _timestamp_pb2.Timestamp
+ eventType: GroupKeyActionEventType
+ eventPayload: str
+ def __init__(self, workerId: _Optional[str] = ..., workflowRunId: _Optional[str] = ..., getGroupKeyRunId: _Optional[str] = ..., actionId: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., eventType: _Optional[_Union[GroupKeyActionEventType, str]] = ..., eventPayload: _Optional[str] = ...) -> None: ...
+
+class StepActionEvent(_message.Message):
+ __slots__ = ("workerId", "jobId", "jobRunId", "stepId", "stepRunId", "actionId", "eventTimestamp", "eventType", "eventPayload", "retryCount")
+ WORKERID_FIELD_NUMBER: _ClassVar[int]
+ JOBID_FIELD_NUMBER: _ClassVar[int]
+ JOBRUNID_FIELD_NUMBER: _ClassVar[int]
+ STEPID_FIELD_NUMBER: _ClassVar[int]
+ STEPRUNID_FIELD_NUMBER: _ClassVar[int]
+ ACTIONID_FIELD_NUMBER: _ClassVar[int]
+ EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int]
+ EVENTTYPE_FIELD_NUMBER: _ClassVar[int]
+ EVENTPAYLOAD_FIELD_NUMBER: _ClassVar[int]
+ RETRYCOUNT_FIELD_NUMBER: _ClassVar[int]
+ workerId: str
+ jobId: str
+ jobRunId: str
+ stepId: str
+ stepRunId: str
+ actionId: str
+ eventTimestamp: _timestamp_pb2.Timestamp
+ eventType: StepActionEventType
+ eventPayload: str
+ retryCount: int
+ def __init__(self, workerId: _Optional[str] = ..., jobId: _Optional[str] = ..., jobRunId: _Optional[str] = ..., stepId: _Optional[str] = ..., stepRunId: _Optional[str] = ..., actionId: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., eventType: _Optional[_Union[StepActionEventType, str]] = ..., eventPayload: _Optional[str] = ..., retryCount: _Optional[int] = ...) -> None: ...
+
+class ActionEventResponse(_message.Message):
+ __slots__ = ("tenantId", "workerId")
+ TENANTID_FIELD_NUMBER: _ClassVar[int]
+ WORKERID_FIELD_NUMBER: _ClassVar[int]
+ tenantId: str
+ workerId: str
+ def __init__(self, tenantId: _Optional[str] = ..., workerId: _Optional[str] = ...) -> None: ...
+
+class SubscribeToWorkflowEventsRequest(_message.Message):
+ __slots__ = ("workflowRunId", "additionalMetaKey", "additionalMetaValue")
+ WORKFLOWRUNID_FIELD_NUMBER: _ClassVar[int]
+ ADDITIONALMETAKEY_FIELD_NUMBER: _ClassVar[int]
+ ADDITIONALMETAVALUE_FIELD_NUMBER: _ClassVar[int]
+ workflowRunId: str
+ additionalMetaKey: str
+ additionalMetaValue: str
+ def __init__(self, workflowRunId: _Optional[str] = ..., additionalMetaKey: _Optional[str] = ..., additionalMetaValue: _Optional[str] = ...) -> None: ...
+
+class SubscribeToWorkflowRunsRequest(_message.Message):
+ __slots__ = ("workflowRunId",)
+ WORKFLOWRUNID_FIELD_NUMBER: _ClassVar[int]
+ workflowRunId: str
+ def __init__(self, workflowRunId: _Optional[str] = ...) -> None: ...
+
+class WorkflowEvent(_message.Message):
+ __slots__ = ("workflowRunId", "resourceType", "eventType", "resourceId", "eventTimestamp", "eventPayload", "hangup", "stepRetries", "retryCount")
+ WORKFLOWRUNID_FIELD_NUMBER: _ClassVar[int]
+ RESOURCETYPE_FIELD_NUMBER: _ClassVar[int]
+ EVENTTYPE_FIELD_NUMBER: _ClassVar[int]
+ RESOURCEID_FIELD_NUMBER: _ClassVar[int]
+ EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int]
+ EVENTPAYLOAD_FIELD_NUMBER: _ClassVar[int]
+ HANGUP_FIELD_NUMBER: _ClassVar[int]
+ STEPRETRIES_FIELD_NUMBER: _ClassVar[int]
+ RETRYCOUNT_FIELD_NUMBER: _ClassVar[int]
+ workflowRunId: str
+ resourceType: ResourceType
+ eventType: ResourceEventType
+ resourceId: str
+ eventTimestamp: _timestamp_pb2.Timestamp
+ eventPayload: str
+ hangup: bool
+ stepRetries: int
+ retryCount: int
+ def __init__(self, workflowRunId: _Optional[str] = ..., resourceType: _Optional[_Union[ResourceType, str]] = ..., eventType: _Optional[_Union[ResourceEventType, str]] = ..., resourceId: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., eventPayload: _Optional[str] = ..., hangup: bool = ..., stepRetries: _Optional[int] = ..., retryCount: _Optional[int] = ...) -> None: ...
+
+class WorkflowRunEvent(_message.Message):
+ __slots__ = ("workflowRunId", "eventType", "eventTimestamp", "results")
+ WORKFLOWRUNID_FIELD_NUMBER: _ClassVar[int]
+ EVENTTYPE_FIELD_NUMBER: _ClassVar[int]
+ EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int]
+ RESULTS_FIELD_NUMBER: _ClassVar[int]
+ workflowRunId: str
+ eventType: WorkflowRunEventType
+ eventTimestamp: _timestamp_pb2.Timestamp
+ results: _containers.RepeatedCompositeFieldContainer[StepRunResult]
+ def __init__(self, workflowRunId: _Optional[str] = ..., eventType: _Optional[_Union[WorkflowRunEventType, str]] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., results: _Optional[_Iterable[_Union[StepRunResult, _Mapping]]] = ...) -> None: ...
+
+class StepRunResult(_message.Message):
+ __slots__ = ("stepRunId", "stepReadableId", "jobRunId", "error", "output")
+ STEPRUNID_FIELD_NUMBER: _ClassVar[int]
+ STEPREADABLEID_FIELD_NUMBER: _ClassVar[int]
+ JOBRUNID_FIELD_NUMBER: _ClassVar[int]
+ ERROR_FIELD_NUMBER: _ClassVar[int]
+ OUTPUT_FIELD_NUMBER: _ClassVar[int]
+ stepRunId: str
+ stepReadableId: str
+ jobRunId: str
+ error: str
+ output: str
+ def __init__(self, stepRunId: _Optional[str] = ..., stepReadableId: _Optional[str] = ..., jobRunId: _Optional[str] = ..., error: _Optional[str] = ..., output: _Optional[str] = ...) -> None: ...
+
+class OverridesData(_message.Message):
+ __slots__ = ("stepRunId", "path", "value", "callerFilename")
+ STEPRUNID_FIELD_NUMBER: _ClassVar[int]
+ PATH_FIELD_NUMBER: _ClassVar[int]
+ VALUE_FIELD_NUMBER: _ClassVar[int]
+ CALLERFILENAME_FIELD_NUMBER: _ClassVar[int]
+ stepRunId: str
+ path: str
+ value: str
+ callerFilename: str
+ def __init__(self, stepRunId: _Optional[str] = ..., path: _Optional[str] = ..., value: _Optional[str] = ..., callerFilename: _Optional[str] = ...) -> None: ...
+
+class OverridesDataResponse(_message.Message):
+ __slots__ = ()
+ def __init__(self) -> None: ...
+
+class HeartbeatRequest(_message.Message):
+ __slots__ = ("workerId", "heartbeatAt")
+ WORKERID_FIELD_NUMBER: _ClassVar[int]
+ HEARTBEATAT_FIELD_NUMBER: _ClassVar[int]
+ workerId: str
+ heartbeatAt: _timestamp_pb2.Timestamp
+ def __init__(self, workerId: _Optional[str] = ..., heartbeatAt: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ...
+
+class HeartbeatResponse(_message.Message):
+ __slots__ = ()
+ def __init__(self) -> None: ...
+
+class RefreshTimeoutRequest(_message.Message):
+ __slots__ = ("stepRunId", "incrementTimeoutBy")
+ STEPRUNID_FIELD_NUMBER: _ClassVar[int]
+ INCREMENTTIMEOUTBY_FIELD_NUMBER: _ClassVar[int]
+ stepRunId: str
+ incrementTimeoutBy: str
+ def __init__(self, stepRunId: _Optional[str] = ..., incrementTimeoutBy: _Optional[str] = ...) -> None: ...
+
+class RefreshTimeoutResponse(_message.Message):
+ __slots__ = ("timeoutAt",)
+ TIMEOUTAT_FIELD_NUMBER: _ClassVar[int]
+ timeoutAt: _timestamp_pb2.Timestamp
+ def __init__(self, timeoutAt: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ...
+
+class ReleaseSlotRequest(_message.Message):
+ __slots__ = ("stepRunId",)
+ STEPRUNID_FIELD_NUMBER: _ClassVar[int]
+ stepRunId: str
+ def __init__(self, stepRunId: _Optional[str] = ...) -> None: ...
+
+class ReleaseSlotResponse(_message.Message):
+ __slots__ = ()
+ def __init__(self) -> None: ...
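The stubs above only declare message shapes, so as a rough orientation sketch (not part of the generated files) here is how a worker-side caller might construct a StepActionEvent. The identifiers and payload are made up for illustration, and the import path simply mirrors the package layout shown in this diff.

from google.protobuf import timestamp_pb2

from hatchet_sdk.contracts import dispatcher_pb2

# Well-known-type helper: fill the Timestamp with the current wall-clock time.
ts = timestamp_pb2.Timestamp()
ts.GetCurrentTime()

# Every field is an optional keyword argument, matching the __init__ signature above.
event = dispatcher_pb2.StepActionEvent(
    workerId="worker-123",           # hypothetical IDs, for illustration only
    stepRunId="step-run-456",
    actionId="my-service:my-step",
    eventTimestamp=ts,
    eventPayload='{"result": "ok"}',
    retryCount=0,
)
payload = event.SerializeToString()  # wire-format bytes, ready to send over gRPC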
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/dispatcher_pb2_grpc.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/dispatcher_pb2_grpc.py
new file mode 100644
index 00000000..6b0d8cf3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/dispatcher_pb2_grpc.py
@@ -0,0 +1,621 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+import warnings
+
+from . import dispatcher_pb2 as dispatcher__pb2
+
+GRPC_GENERATED_VERSION = '1.64.1'
+GRPC_VERSION = grpc.__version__
+EXPECTED_ERROR_RELEASE = '1.65.0'
+SCHEDULED_RELEASE_DATE = 'June 25, 2024'
+_version_not_supported = False
+
+try:
+ from grpc._utilities import first_version_is_lower
+ _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
+except ImportError:
+ _version_not_supported = True
+
+if _version_not_supported:
+ warnings.warn(
+ f'The grpc package installed is at version {GRPC_VERSION},'
+ + f' but the generated code in dispatcher_pb2_grpc.py depends on'
+ + f' grpcio>={GRPC_GENERATED_VERSION}.'
+ + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
+ + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
+ + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},'
+ + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.',
+ RuntimeWarning
+ )
+
+
+class DispatcherStub(object):
+ """Missing associated documentation comment in .proto file."""
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.Register = channel.unary_unary(
+ '/Dispatcher/Register',
+ request_serializer=dispatcher__pb2.WorkerRegisterRequest.SerializeToString,
+ response_deserializer=dispatcher__pb2.WorkerRegisterResponse.FromString,
+ _registered_method=True)
+ self.Listen = channel.unary_stream(
+ '/Dispatcher/Listen',
+ request_serializer=dispatcher__pb2.WorkerListenRequest.SerializeToString,
+ response_deserializer=dispatcher__pb2.AssignedAction.FromString,
+ _registered_method=True)
+ self.ListenV2 = channel.unary_stream(
+ '/Dispatcher/ListenV2',
+ request_serializer=dispatcher__pb2.WorkerListenRequest.SerializeToString,
+ response_deserializer=dispatcher__pb2.AssignedAction.FromString,
+ _registered_method=True)
+ self.Heartbeat = channel.unary_unary(
+ '/Dispatcher/Heartbeat',
+ request_serializer=dispatcher__pb2.HeartbeatRequest.SerializeToString,
+ response_deserializer=dispatcher__pb2.HeartbeatResponse.FromString,
+ _registered_method=True)
+ self.SubscribeToWorkflowEvents = channel.unary_stream(
+ '/Dispatcher/SubscribeToWorkflowEvents',
+ request_serializer=dispatcher__pb2.SubscribeToWorkflowEventsRequest.SerializeToString,
+ response_deserializer=dispatcher__pb2.WorkflowEvent.FromString,
+ _registered_method=True)
+ self.SubscribeToWorkflowRuns = channel.stream_stream(
+ '/Dispatcher/SubscribeToWorkflowRuns',
+ request_serializer=dispatcher__pb2.SubscribeToWorkflowRunsRequest.SerializeToString,
+ response_deserializer=dispatcher__pb2.WorkflowRunEvent.FromString,
+ _registered_method=True)
+ self.SendStepActionEvent = channel.unary_unary(
+ '/Dispatcher/SendStepActionEvent',
+ request_serializer=dispatcher__pb2.StepActionEvent.SerializeToString,
+ response_deserializer=dispatcher__pb2.ActionEventResponse.FromString,
+ _registered_method=True)
+ self.SendGroupKeyActionEvent = channel.unary_unary(
+ '/Dispatcher/SendGroupKeyActionEvent',
+ request_serializer=dispatcher__pb2.GroupKeyActionEvent.SerializeToString,
+ response_deserializer=dispatcher__pb2.ActionEventResponse.FromString,
+ _registered_method=True)
+ self.PutOverridesData = channel.unary_unary(
+ '/Dispatcher/PutOverridesData',
+ request_serializer=dispatcher__pb2.OverridesData.SerializeToString,
+ response_deserializer=dispatcher__pb2.OverridesDataResponse.FromString,
+ _registered_method=True)
+ self.Unsubscribe = channel.unary_unary(
+ '/Dispatcher/Unsubscribe',
+ request_serializer=dispatcher__pb2.WorkerUnsubscribeRequest.SerializeToString,
+ response_deserializer=dispatcher__pb2.WorkerUnsubscribeResponse.FromString,
+ _registered_method=True)
+ self.RefreshTimeout = channel.unary_unary(
+ '/Dispatcher/RefreshTimeout',
+ request_serializer=dispatcher__pb2.RefreshTimeoutRequest.SerializeToString,
+ response_deserializer=dispatcher__pb2.RefreshTimeoutResponse.FromString,
+ _registered_method=True)
+ self.ReleaseSlot = channel.unary_unary(
+ '/Dispatcher/ReleaseSlot',
+ request_serializer=dispatcher__pb2.ReleaseSlotRequest.SerializeToString,
+ response_deserializer=dispatcher__pb2.ReleaseSlotResponse.FromString,
+ _registered_method=True)
+ self.UpsertWorkerLabels = channel.unary_unary(
+ '/Dispatcher/UpsertWorkerLabels',
+ request_serializer=dispatcher__pb2.UpsertWorkerLabelsRequest.SerializeToString,
+ response_deserializer=dispatcher__pb2.UpsertWorkerLabelsResponse.FromString,
+ _registered_method=True)
+
+
+class DispatcherServicer(object):
+ """Missing associated documentation comment in .proto file."""
+
+ def Register(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def Listen(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def ListenV2(self, request, context):
+ """ListenV2 is like listen, but implementation does not include heartbeats. This should only used by SDKs
+ against engine version v0.18.1+
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def Heartbeat(self, request, context):
+ """Heartbeat is a method for workers to send heartbeats to the dispatcher
+ """
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def SubscribeToWorkflowEvents(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def SubscribeToWorkflowRuns(self, request_iterator, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def SendStepActionEvent(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def SendGroupKeyActionEvent(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def PutOverridesData(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def Unsubscribe(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def RefreshTimeout(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def ReleaseSlot(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def UpsertWorkerLabels(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+
+def add_DispatcherServicer_to_server(servicer, server):
+ rpc_method_handlers = {
+ 'Register': grpc.unary_unary_rpc_method_handler(
+ servicer.Register,
+ request_deserializer=dispatcher__pb2.WorkerRegisterRequest.FromString,
+ response_serializer=dispatcher__pb2.WorkerRegisterResponse.SerializeToString,
+ ),
+ 'Listen': grpc.unary_stream_rpc_method_handler(
+ servicer.Listen,
+ request_deserializer=dispatcher__pb2.WorkerListenRequest.FromString,
+ response_serializer=dispatcher__pb2.AssignedAction.SerializeToString,
+ ),
+ 'ListenV2': grpc.unary_stream_rpc_method_handler(
+ servicer.ListenV2,
+ request_deserializer=dispatcher__pb2.WorkerListenRequest.FromString,
+ response_serializer=dispatcher__pb2.AssignedAction.SerializeToString,
+ ),
+ 'Heartbeat': grpc.unary_unary_rpc_method_handler(
+ servicer.Heartbeat,
+ request_deserializer=dispatcher__pb2.HeartbeatRequest.FromString,
+ response_serializer=dispatcher__pb2.HeartbeatResponse.SerializeToString,
+ ),
+ 'SubscribeToWorkflowEvents': grpc.unary_stream_rpc_method_handler(
+ servicer.SubscribeToWorkflowEvents,
+ request_deserializer=dispatcher__pb2.SubscribeToWorkflowEventsRequest.FromString,
+ response_serializer=dispatcher__pb2.WorkflowEvent.SerializeToString,
+ ),
+ 'SubscribeToWorkflowRuns': grpc.stream_stream_rpc_method_handler(
+ servicer.SubscribeToWorkflowRuns,
+ request_deserializer=dispatcher__pb2.SubscribeToWorkflowRunsRequest.FromString,
+ response_serializer=dispatcher__pb2.WorkflowRunEvent.SerializeToString,
+ ),
+ 'SendStepActionEvent': grpc.unary_unary_rpc_method_handler(
+ servicer.SendStepActionEvent,
+ request_deserializer=dispatcher__pb2.StepActionEvent.FromString,
+ response_serializer=dispatcher__pb2.ActionEventResponse.SerializeToString,
+ ),
+ 'SendGroupKeyActionEvent': grpc.unary_unary_rpc_method_handler(
+ servicer.SendGroupKeyActionEvent,
+ request_deserializer=dispatcher__pb2.GroupKeyActionEvent.FromString,
+ response_serializer=dispatcher__pb2.ActionEventResponse.SerializeToString,
+ ),
+ 'PutOverridesData': grpc.unary_unary_rpc_method_handler(
+ servicer.PutOverridesData,
+ request_deserializer=dispatcher__pb2.OverridesData.FromString,
+ response_serializer=dispatcher__pb2.OverridesDataResponse.SerializeToString,
+ ),
+ 'Unsubscribe': grpc.unary_unary_rpc_method_handler(
+ servicer.Unsubscribe,
+ request_deserializer=dispatcher__pb2.WorkerUnsubscribeRequest.FromString,
+ response_serializer=dispatcher__pb2.WorkerUnsubscribeResponse.SerializeToString,
+ ),
+ 'RefreshTimeout': grpc.unary_unary_rpc_method_handler(
+ servicer.RefreshTimeout,
+ request_deserializer=dispatcher__pb2.RefreshTimeoutRequest.FromString,
+ response_serializer=dispatcher__pb2.RefreshTimeoutResponse.SerializeToString,
+ ),
+ 'ReleaseSlot': grpc.unary_unary_rpc_method_handler(
+ servicer.ReleaseSlot,
+ request_deserializer=dispatcher__pb2.ReleaseSlotRequest.FromString,
+ response_serializer=dispatcher__pb2.ReleaseSlotResponse.SerializeToString,
+ ),
+ 'UpsertWorkerLabels': grpc.unary_unary_rpc_method_handler(
+ servicer.UpsertWorkerLabels,
+ request_deserializer=dispatcher__pb2.UpsertWorkerLabelsRequest.FromString,
+ response_serializer=dispatcher__pb2.UpsertWorkerLabelsResponse.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'Dispatcher', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+ server.add_registered_method_handlers('Dispatcher', rpc_method_handlers)
+
+
+ # This class is part of an EXPERIMENTAL API.
+class Dispatcher(object):
+ """Missing associated documentation comment in .proto file."""
+
+ @staticmethod
+ def Register(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/Dispatcher/Register',
+ dispatcher__pb2.WorkerRegisterRequest.SerializeToString,
+ dispatcher__pb2.WorkerRegisterResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def Listen(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_stream(
+ request,
+ target,
+ '/Dispatcher/Listen',
+ dispatcher__pb2.WorkerListenRequest.SerializeToString,
+ dispatcher__pb2.AssignedAction.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def ListenV2(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_stream(
+ request,
+ target,
+ '/Dispatcher/ListenV2',
+ dispatcher__pb2.WorkerListenRequest.SerializeToString,
+ dispatcher__pb2.AssignedAction.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def Heartbeat(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/Dispatcher/Heartbeat',
+ dispatcher__pb2.HeartbeatRequest.SerializeToString,
+ dispatcher__pb2.HeartbeatResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def SubscribeToWorkflowEvents(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_stream(
+ request,
+ target,
+ '/Dispatcher/SubscribeToWorkflowEvents',
+ dispatcher__pb2.SubscribeToWorkflowEventsRequest.SerializeToString,
+ dispatcher__pb2.WorkflowEvent.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def SubscribeToWorkflowRuns(request_iterator,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.stream_stream(
+ request_iterator,
+ target,
+ '/Dispatcher/SubscribeToWorkflowRuns',
+ dispatcher__pb2.SubscribeToWorkflowRunsRequest.SerializeToString,
+ dispatcher__pb2.WorkflowRunEvent.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def SendStepActionEvent(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/Dispatcher/SendStepActionEvent',
+ dispatcher__pb2.StepActionEvent.SerializeToString,
+ dispatcher__pb2.ActionEventResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def SendGroupKeyActionEvent(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/Dispatcher/SendGroupKeyActionEvent',
+ dispatcher__pb2.GroupKeyActionEvent.SerializeToString,
+ dispatcher__pb2.ActionEventResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def PutOverridesData(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/Dispatcher/PutOverridesData',
+ dispatcher__pb2.OverridesData.SerializeToString,
+ dispatcher__pb2.OverridesDataResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def Unsubscribe(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/Dispatcher/Unsubscribe',
+ dispatcher__pb2.WorkerUnsubscribeRequest.SerializeToString,
+ dispatcher__pb2.WorkerUnsubscribeResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def RefreshTimeout(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/Dispatcher/RefreshTimeout',
+ dispatcher__pb2.RefreshTimeoutRequest.SerializeToString,
+ dispatcher__pb2.RefreshTimeoutResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def ReleaseSlot(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/Dispatcher/ReleaseSlot',
+ dispatcher__pb2.ReleaseSlotRequest.SerializeToString,
+ dispatcher__pb2.ReleaseSlotResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def UpsertWorkerLabels(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/Dispatcher/UpsertWorkerLabels',
+ dispatcher__pb2.UpsertWorkerLabelsRequest.SerializeToString,
+ dispatcher__pb2.UpsertWorkerLabelsResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
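For orientation, a minimal client-side sketch of the stub generated above: it assumes a dispatcher reachable over an insecure local channel (the address is illustrative, not taken from this diff) and sends a single Heartbeat, which is a unary-unary RPC; Listen, ListenV2 and the subscription methods return response iterators instead.

import grpc
from google.protobuf import timestamp_pb2

from hatchet_sdk.contracts import dispatcher_pb2, dispatcher_pb2_grpc

channel = grpc.insecure_channel("localhost:7070")  # assumed address, for illustration
stub = dispatcher_pb2_grpc.DispatcherStub(channel)

ts = timestamp_pb2.Timestamp()
ts.GetCurrentTime()

# Unary-unary call; the streaming methods (Listen, SubscribeToWorkflowEvents, ...)
# would instead return an iterator of response messages.
resp = stub.Heartbeat(
    dispatcher_pb2.HeartbeatRequest(workerId="worker-123", heartbeatAt=ts)
)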
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/events_pb2.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/events_pb2.py
new file mode 100644
index 00000000..f0a26fce
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/events_pb2.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: events.proto
+# Protobuf Python Version: 5.26.1
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x65vents.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xb4\x01\n\x05\x45vent\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventId\x18\x02 \x01(\t\x12\x0b\n\x03key\x18\x03 \x01(\t\x12\x0f\n\x07payload\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x06 \x01(\tH\x00\x88\x01\x01\x42\x15\n\x13_additionalMetadata\" \n\x06\x45vents\x12\x16\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x06.Event\"\x92\x01\n\rPutLogRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x12\n\x05level\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x10\n\x08metadata\x18\x05 \x01(\tB\x08\n\x06_level\"\x10\n\x0ePutLogResponse\"|\n\x15PutStreamEventRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\x0c\x12\x10\n\x08metadata\x18\x05 \x01(\t\"\x18\n\x16PutStreamEventResponse\"9\n\x14\x42ulkPushEventRequest\x12!\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x11.PushEventRequest\"\x9c\x01\n\x10PushEventRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0f\n\x07payload\x18\x02 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x04 \x01(\tH\x00\x88\x01\x01\x42\x15\n\x13_additionalMetadata\"%\n\x12ReplayEventRequest\x12\x0f\n\x07\x65ventId\x18\x01 \x01(\t2\x88\x02\n\rEventsService\x12#\n\x04Push\x12\x11.PushEventRequest\x1a\x06.Event\"\x00\x12,\n\x08\x42ulkPush\x12\x15.BulkPushEventRequest\x1a\x07.Events\"\x00\x12\x32\n\x11ReplaySingleEvent\x12\x13.ReplayEventRequest\x1a\x06.Event\"\x00\x12+\n\x06PutLog\x12\x0e.PutLogRequest\x1a\x0f.PutLogResponse\"\x00\x12\x43\n\x0ePutStreamEvent\x12\x16.PutStreamEventRequest\x1a\x17.PutStreamEventResponse\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'events_pb2', _globals)
+if not _descriptor._USE_C_DESCRIPTORS:
+ _globals['DESCRIPTOR']._loaded_options = None
+ _globals['DESCRIPTOR']._serialized_options = b'ZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contracts'
+ _globals['_EVENT']._serialized_start=50
+ _globals['_EVENT']._serialized_end=230
+ _globals['_EVENTS']._serialized_start=232
+ _globals['_EVENTS']._serialized_end=264
+ _globals['_PUTLOGREQUEST']._serialized_start=267
+ _globals['_PUTLOGREQUEST']._serialized_end=413
+ _globals['_PUTLOGRESPONSE']._serialized_start=415
+ _globals['_PUTLOGRESPONSE']._serialized_end=431
+ _globals['_PUTSTREAMEVENTREQUEST']._serialized_start=433
+ _globals['_PUTSTREAMEVENTREQUEST']._serialized_end=557
+ _globals['_PUTSTREAMEVENTRESPONSE']._serialized_start=559
+ _globals['_PUTSTREAMEVENTRESPONSE']._serialized_end=583
+ _globals['_BULKPUSHEVENTREQUEST']._serialized_start=585
+ _globals['_BULKPUSHEVENTREQUEST']._serialized_end=642
+ _globals['_PUSHEVENTREQUEST']._serialized_start=645
+ _globals['_PUSHEVENTREQUEST']._serialized_end=801
+ _globals['_REPLAYEVENTREQUEST']._serialized_start=803
+ _globals['_REPLAYEVENTREQUEST']._serialized_end=840
+ _globals['_EVENTSSERVICE']._serialized_start=843
+ _globals['_EVENTSSERVICE']._serialized_end=1107
+# @@protoc_insertion_point(module_scope)
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/events_pb2.pyi b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/events_pb2.pyi
new file mode 100644
index 00000000..e9132fb2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/events_pb2.pyi
@@ -0,0 +1,87 @@
+from google.protobuf import timestamp_pb2 as _timestamp_pb2
+from google.protobuf.internal import containers as _containers
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union
+
+DESCRIPTOR: _descriptor.FileDescriptor
+
+class Event(_message.Message):
+ __slots__ = ("tenantId", "eventId", "key", "payload", "eventTimestamp", "additionalMetadata")
+ TENANTID_FIELD_NUMBER: _ClassVar[int]
+ EVENTID_FIELD_NUMBER: _ClassVar[int]
+ KEY_FIELD_NUMBER: _ClassVar[int]
+ PAYLOAD_FIELD_NUMBER: _ClassVar[int]
+ EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int]
+ ADDITIONALMETADATA_FIELD_NUMBER: _ClassVar[int]
+ tenantId: str
+ eventId: str
+ key: str
+ payload: str
+ eventTimestamp: _timestamp_pb2.Timestamp
+ additionalMetadata: str
+ def __init__(self, tenantId: _Optional[str] = ..., eventId: _Optional[str] = ..., key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., additionalMetadata: _Optional[str] = ...) -> None: ...
+
+class Events(_message.Message):
+ __slots__ = ("events",)
+ EVENTS_FIELD_NUMBER: _ClassVar[int]
+ events: _containers.RepeatedCompositeFieldContainer[Event]
+ def __init__(self, events: _Optional[_Iterable[_Union[Event, _Mapping]]] = ...) -> None: ...
+
+class PutLogRequest(_message.Message):
+ __slots__ = ("stepRunId", "createdAt", "message", "level", "metadata")
+ STEPRUNID_FIELD_NUMBER: _ClassVar[int]
+ CREATEDAT_FIELD_NUMBER: _ClassVar[int]
+ MESSAGE_FIELD_NUMBER: _ClassVar[int]
+ LEVEL_FIELD_NUMBER: _ClassVar[int]
+ METADATA_FIELD_NUMBER: _ClassVar[int]
+ stepRunId: str
+ createdAt: _timestamp_pb2.Timestamp
+ message: str
+ level: str
+ metadata: str
+ def __init__(self, stepRunId: _Optional[str] = ..., createdAt: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., message: _Optional[str] = ..., level: _Optional[str] = ..., metadata: _Optional[str] = ...) -> None: ...
+
+class PutLogResponse(_message.Message):
+ __slots__ = ()
+ def __init__(self) -> None: ...
+
+class PutStreamEventRequest(_message.Message):
+ __slots__ = ("stepRunId", "createdAt", "message", "metadata")
+ STEPRUNID_FIELD_NUMBER: _ClassVar[int]
+ CREATEDAT_FIELD_NUMBER: _ClassVar[int]
+ MESSAGE_FIELD_NUMBER: _ClassVar[int]
+ METADATA_FIELD_NUMBER: _ClassVar[int]
+ stepRunId: str
+ createdAt: _timestamp_pb2.Timestamp
+ message: bytes
+ metadata: str
+ def __init__(self, stepRunId: _Optional[str] = ..., createdAt: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., message: _Optional[bytes] = ..., metadata: _Optional[str] = ...) -> None: ...
+
+class PutStreamEventResponse(_message.Message):
+ __slots__ = ()
+ def __init__(self) -> None: ...
+
+class BulkPushEventRequest(_message.Message):
+ __slots__ = ("events",)
+ EVENTS_FIELD_NUMBER: _ClassVar[int]
+ events: _containers.RepeatedCompositeFieldContainer[PushEventRequest]
+ def __init__(self, events: _Optional[_Iterable[_Union[PushEventRequest, _Mapping]]] = ...) -> None: ...
+
+class PushEventRequest(_message.Message):
+ __slots__ = ("key", "payload", "eventTimestamp", "additionalMetadata")
+ KEY_FIELD_NUMBER: _ClassVar[int]
+ PAYLOAD_FIELD_NUMBER: _ClassVar[int]
+ EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int]
+ ADDITIONALMETADATA_FIELD_NUMBER: _ClassVar[int]
+ key: str
+ payload: str
+ eventTimestamp: _timestamp_pb2.Timestamp
+ additionalMetadata: str
+ def __init__(self, key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., additionalMetadata: _Optional[str] = ...) -> None: ...
+
+class ReplayEventRequest(_message.Message):
+ __slots__ = ("eventId",)
+ EVENTID_FIELD_NUMBER: _ClassVar[int]
+ eventId: str
+ def __init__(self, eventId: _Optional[str] = ...) -> None: ...
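A small sketch of building the event messages typed above; the key and payload values are placeholders chosen for illustration.

from hatchet_sdk.contracts import events_pb2

# Single event push request; key/payload values are illustrative.
single = events_pb2.PushEventRequest(key="user:created", payload='{"id": 1}')

# BulkPushEventRequest wraps a repeated field of PushEventRequest messages.
bulk = events_pb2.BulkPushEventRequest(events=[single])
assert len(bulk.events) == 1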
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/events_pb2_grpc.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/events_pb2_grpc.py
new file mode 100644
index 00000000..ddea5aa4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/events_pb2_grpc.py
@@ -0,0 +1,274 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+import warnings
+
+from . import events_pb2 as events__pb2
+
+GRPC_GENERATED_VERSION = '1.64.1'
+GRPC_VERSION = grpc.__version__
+EXPECTED_ERROR_RELEASE = '1.65.0'
+SCHEDULED_RELEASE_DATE = 'June 25, 2024'
+_version_not_supported = False
+
+try:
+ from grpc._utilities import first_version_is_lower
+ _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
+except ImportError:
+ _version_not_supported = True
+
+if _version_not_supported:
+ warnings.warn(
+ f'The grpc package installed is at version {GRPC_VERSION},'
+ + f' but the generated code in events_pb2_grpc.py depends on'
+ + f' grpcio>={GRPC_GENERATED_VERSION}.'
+ + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
+ + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
+ + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},'
+ + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.',
+ RuntimeWarning
+ )
+
+
+class EventsServiceStub(object):
+ """Missing associated documentation comment in .proto file."""
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.Push = channel.unary_unary(
+ '/EventsService/Push',
+ request_serializer=events__pb2.PushEventRequest.SerializeToString,
+ response_deserializer=events__pb2.Event.FromString,
+ _registered_method=True)
+ self.BulkPush = channel.unary_unary(
+ '/EventsService/BulkPush',
+ request_serializer=events__pb2.BulkPushEventRequest.SerializeToString,
+ response_deserializer=events__pb2.Events.FromString,
+ _registered_method=True)
+ self.ReplaySingleEvent = channel.unary_unary(
+ '/EventsService/ReplaySingleEvent',
+ request_serializer=events__pb2.ReplayEventRequest.SerializeToString,
+ response_deserializer=events__pb2.Event.FromString,
+ _registered_method=True)
+ self.PutLog = channel.unary_unary(
+ '/EventsService/PutLog',
+ request_serializer=events__pb2.PutLogRequest.SerializeToString,
+ response_deserializer=events__pb2.PutLogResponse.FromString,
+ _registered_method=True)
+ self.PutStreamEvent = channel.unary_unary(
+ '/EventsService/PutStreamEvent',
+ request_serializer=events__pb2.PutStreamEventRequest.SerializeToString,
+ response_deserializer=events__pb2.PutStreamEventResponse.FromString,
+ _registered_method=True)
+
+
+class EventsServiceServicer(object):
+ """Missing associated documentation comment in .proto file."""
+
+ def Push(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def BulkPush(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def ReplaySingleEvent(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def PutLog(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def PutStreamEvent(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+
+def add_EventsServiceServicer_to_server(servicer, server):
+ rpc_method_handlers = {
+ 'Push': grpc.unary_unary_rpc_method_handler(
+ servicer.Push,
+ request_deserializer=events__pb2.PushEventRequest.FromString,
+ response_serializer=events__pb2.Event.SerializeToString,
+ ),
+ 'BulkPush': grpc.unary_unary_rpc_method_handler(
+ servicer.BulkPush,
+ request_deserializer=events__pb2.BulkPushEventRequest.FromString,
+ response_serializer=events__pb2.Events.SerializeToString,
+ ),
+ 'ReplaySingleEvent': grpc.unary_unary_rpc_method_handler(
+ servicer.ReplaySingleEvent,
+ request_deserializer=events__pb2.ReplayEventRequest.FromString,
+ response_serializer=events__pb2.Event.SerializeToString,
+ ),
+ 'PutLog': grpc.unary_unary_rpc_method_handler(
+ servicer.PutLog,
+ request_deserializer=events__pb2.PutLogRequest.FromString,
+ response_serializer=events__pb2.PutLogResponse.SerializeToString,
+ ),
+ 'PutStreamEvent': grpc.unary_unary_rpc_method_handler(
+ servicer.PutStreamEvent,
+ request_deserializer=events__pb2.PutStreamEventRequest.FromString,
+ response_serializer=events__pb2.PutStreamEventResponse.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'EventsService', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+ server.add_registered_method_handlers('EventsService', rpc_method_handlers)
+
+
+ # This class is part of an EXPERIMENTAL API.
+class EventsService(object):
+ """Missing associated documentation comment in .proto file."""
+
+ @staticmethod
+ def Push(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/EventsService/Push',
+ events__pb2.PushEventRequest.SerializeToString,
+ events__pb2.Event.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def BulkPush(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/EventsService/BulkPush',
+ events__pb2.BulkPushEventRequest.SerializeToString,
+ events__pb2.Events.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def ReplaySingleEvent(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/EventsService/ReplaySingleEvent',
+ events__pb2.ReplayEventRequest.SerializeToString,
+ events__pb2.Event.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def PutLog(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/EventsService/PutLog',
+ events__pb2.PutLogRequest.SerializeToString,
+ events__pb2.PutLogResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def PutStreamEvent(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/EventsService/PutStreamEvent',
+ events__pb2.PutStreamEventRequest.SerializeToString,
+ events__pb2.PutStreamEventResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
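And a matching client-side sketch for the EventsService stub above; again the channel address is an assumption, and Push is unary-unary, returning an Event message as declared by the stub's deserializer.

import grpc

from hatchet_sdk.contracts import events_pb2, events_pb2_grpc

channel = grpc.insecure_channel("localhost:7070")  # assumed address, for illustration
stub = events_pb2_grpc.EventsServiceStub(channel)

# Push is unary-unary and returns an events_pb2.Event per the stub definition above.
event = stub.Push(
    events_pb2.PushEventRequest(key="user:created", payload='{"id": 1}')
)
print(event.eventId)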
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/workflows_pb2.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/workflows_pb2.py
new file mode 100644
index 00000000..a2010e39
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/workflows_pb2.py
@@ -0,0 +1,80 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: workflows.proto
+# Protobuf Python Version: 5.26.1
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0fworkflows.proto\x1a\x1fgoogle/protobuf/timestamp.proto\">\n\x12PutWorkflowRequest\x12(\n\x04opts\x18\x01 \x01(\x0b\x32\x1a.CreateWorkflowVersionOpts\"\xbf\x04\n\x19\x43reateWorkflowVersionOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x16\n\x0e\x65vent_triggers\x18\x04 \x03(\t\x12\x15\n\rcron_triggers\x18\x05 \x03(\t\x12\x36\n\x12scheduled_triggers\x18\x06 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12$\n\x04jobs\x18\x07 \x03(\x0b\x32\x16.CreateWorkflowJobOpts\x12-\n\x0b\x63oncurrency\x18\x08 \x01(\x0b\x32\x18.WorkflowConcurrencyOpts\x12\x1d\n\x10schedule_timeout\x18\t \x01(\tH\x00\x88\x01\x01\x12\x17\n\ncron_input\x18\n \x01(\tH\x01\x88\x01\x01\x12\x33\n\x0eon_failure_job\x18\x0b \x01(\x0b\x32\x16.CreateWorkflowJobOptsH\x02\x88\x01\x01\x12$\n\x06sticky\x18\x0c \x01(\x0e\x32\x0f.StickyStrategyH\x03\x88\x01\x01\x12 \n\x04kind\x18\r \x01(\x0e\x32\r.WorkflowKindH\x04\x88\x01\x01\x12\x1d\n\x10\x64\x65\x66\x61ult_priority\x18\x0e \x01(\x05H\x05\x88\x01\x01\x42\x13\n\x11_schedule_timeoutB\r\n\x0b_cron_inputB\x11\n\x0f_on_failure_jobB\t\n\x07_stickyB\x07\n\x05_kindB\x13\n\x11_default_priority\"\xd0\x01\n\x17WorkflowConcurrencyOpts\x12\x13\n\x06\x61\x63tion\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08max_runs\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x36\n\x0elimit_strategy\x18\x03 \x01(\x0e\x32\x19.ConcurrencyLimitStrategyH\x02\x88\x01\x01\x12\x17\n\nexpression\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\t\n\x07_actionB\x0b\n\t_max_runsB\x11\n\x0f_limit_strategyB\r\n\x0b_expression\"h\n\x15\x43reateWorkflowJobOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12&\n\x05steps\x18\x04 \x03(\x0b\x32\x17.CreateWorkflowStepOptsJ\x04\x08\x03\x10\x04\"\xe1\x01\n\x13\x44\x65siredWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x15\n\x08required\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12/\n\ncomparator\x18\x04 \x01(\x0e\x32\x16.WorkerLabelComparatorH\x03\x88\x01\x01\x12\x13\n\x06weight\x18\x05 \x01(\x05H\x04\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValueB\x0b\n\t_requiredB\r\n\x0b_comparatorB\t\n\x07_weight\"\xb5\x03\n\x16\x43reateWorkflowStepOpts\x12\x13\n\x0breadable_id\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\t\x12\x0e\n\x06inputs\x18\x04 \x01(\t\x12\x0f\n\x07parents\x18\x05 \x03(\t\x12\x11\n\tuser_data\x18\x06 \x01(\t\x12\x0f\n\x07retries\x18\x07 \x01(\x05\x12)\n\x0brate_limits\x18\x08 \x03(\x0b\x32\x14.CreateStepRateLimit\x12@\n\rworker_labels\x18\t \x03(\x0b\x32).CreateWorkflowStepOpts.WorkerLabelsEntry\x12\x1b\n\x0e\x62\x61\x63koff_factor\x18\n \x01(\x02H\x00\x88\x01\x01\x12 \n\x13\x62\x61\x63koff_max_seconds\x18\x0b \x01(\x05H\x01\x88\x01\x01\x1aI\n\x11WorkerLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.DesiredWorkerLabels:\x02\x38\x01\x42\x11\n\x0f_backoff_factorB\x16\n\x14_backoff_max_seconds\"\xfa\x01\n\x13\x43reateStepRateLimit\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x12\n\x05units\x18\x02 \x01(\x05H\x00\x88\x01\x01\x12\x15\n\x08key_expr\x18\x03 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nunits_expr\x18\x04 \x01(\tH\x02\x88\x01\x01\x12\x1e\n\x11limit_values_expr\x18\x05 \x01(\tH\x03\x88\x01\x01\x12)\n\x08\x64uration\x18\x06 
\x01(\x0e\x32\x12.RateLimitDurationH\x04\x88\x01\x01\x42\x08\n\x06_unitsB\x0b\n\t_key_exprB\r\n\x0b_units_exprB\x14\n\x12_limit_values_exprB\x0b\n\t_duration\"\x16\n\x14ListWorkflowsRequest\"\xcd\x02\n\x17ScheduleWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tschedules\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05input\x18\x03 \x01(\t\x12\x16\n\tparent_id\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x06 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x07 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x08 \x01(\tH\x04\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadata\"O\n\x11ScheduledWorkflow\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ntrigger_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xe3\x01\n\x0fWorkflowVersion\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07version\x18\x05 \x01(\t\x12\r\n\x05order\x18\x06 \x01(\x03\x12\x13\n\x0bworkflow_id\x18\x07 \x01(\t\x12/\n\x13scheduled_workflows\x18\x08 \x03(\x0b\x32\x12.ScheduledWorkflow\"?\n\x17WorkflowTriggerEventRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x11\n\tevent_key\x18\x02 \x01(\t\"9\n\x16WorkflowTriggerCronRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x0c\n\x04\x63ron\x18\x02 \x01(\t\"H\n\x1a\x42ulkTriggerWorkflowRequest\x12*\n\tworkflows\x18\x01 \x03(\x0b\x32\x17.TriggerWorkflowRequest\"7\n\x1b\x42ulkTriggerWorkflowResponse\x12\x18\n\x10workflow_run_ids\x18\x01 \x03(\t\"\xf7\x02\n\x16TriggerWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x02 \x01(\t\x12\x16\n\tparent_id\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x04 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x05 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x06 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x07 \x01(\tH\x04\x88\x01\x01\x12\x1e\n\x11\x64\x65sired_worker_id\x18\x08 \x01(\tH\x05\x88\x01\x01\x12\x15\n\x08priority\x18\t \x01(\x05H\x06\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadataB\x14\n\x12_desired_worker_idB\x0b\n\t_priority\"2\n\x17TriggerWorkflowResponse\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"W\n\x13PutRateLimitRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05limit\x18\x02 \x01(\x05\x12$\n\x08\x64uration\x18\x03 
\x01(\x0e\x32\x12.RateLimitDuration\"\x16\n\x14PutRateLimitResponse*$\n\x0eStickyStrategy\x12\x08\n\x04SOFT\x10\x00\x12\x08\n\x04HARD\x10\x01*2\n\x0cWorkflowKind\x12\x0c\n\x08\x46UNCTION\x10\x00\x12\x0b\n\x07\x44URABLE\x10\x01\x12\x07\n\x03\x44\x41G\x10\x02*\x7f\n\x18\x43oncurrencyLimitStrategy\x12\x16\n\x12\x43\x41NCEL_IN_PROGRESS\x10\x00\x12\x0f\n\x0b\x44ROP_NEWEST\x10\x01\x12\x10\n\x0cQUEUE_NEWEST\x10\x02\x12\x15\n\x11GROUP_ROUND_ROBIN\x10\x03\x12\x11\n\rCANCEL_NEWEST\x10\x04*\x85\x01\n\x15WorkerLabelComparator\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x10\n\x0cGREATER_THAN\x10\x02\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x05*]\n\x11RateLimitDuration\x12\n\n\x06SECOND\x10\x00\x12\n\n\x06MINUTE\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\x07\n\x03\x44\x41Y\x10\x03\x12\x08\n\x04WEEK\x10\x04\x12\t\n\x05MONTH\x10\x05\x12\x08\n\x04YEAR\x10\x06\x32\xdc\x02\n\x0fWorkflowService\x12\x34\n\x0bPutWorkflow\x12\x13.PutWorkflowRequest\x1a\x10.WorkflowVersion\x12>\n\x10ScheduleWorkflow\x12\x18.ScheduleWorkflowRequest\x1a\x10.WorkflowVersion\x12\x44\n\x0fTriggerWorkflow\x12\x17.TriggerWorkflowRequest\x1a\x18.TriggerWorkflowResponse\x12P\n\x13\x42ulkTriggerWorkflow\x12\x1b.BulkTriggerWorkflowRequest\x1a\x1c.BulkTriggerWorkflowResponse\x12;\n\x0cPutRateLimit\x12\x14.PutRateLimitRequest\x1a\x15.PutRateLimitResponseBBZ@github.com/hatchet-dev/hatchet/internal/services/admin/contractsb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'workflows_pb2', _globals)
+if not _descriptor._USE_C_DESCRIPTORS:
+ _globals['DESCRIPTOR']._loaded_options = None
+ _globals['DESCRIPTOR']._serialized_options = b'Z@github.com/hatchet-dev/hatchet/internal/services/admin/contracts'
+ _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._loaded_options = None
+ _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_options = b'8\001'
+ _globals['_STICKYSTRATEGY']._serialized_start=3401
+ _globals['_STICKYSTRATEGY']._serialized_end=3437
+ _globals['_WORKFLOWKIND']._serialized_start=3439
+ _globals['_WORKFLOWKIND']._serialized_end=3489
+ _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_start=3491
+ _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_end=3618
+ _globals['_WORKERLABELCOMPARATOR']._serialized_start=3621
+ _globals['_WORKERLABELCOMPARATOR']._serialized_end=3754
+ _globals['_RATELIMITDURATION']._serialized_start=3756
+ _globals['_RATELIMITDURATION']._serialized_end=3849
+ _globals['_PUTWORKFLOWREQUEST']._serialized_start=52
+ _globals['_PUTWORKFLOWREQUEST']._serialized_end=114
+ _globals['_CREATEWORKFLOWVERSIONOPTS']._serialized_start=117
+ _globals['_CREATEWORKFLOWVERSIONOPTS']._serialized_end=692
+ _globals['_WORKFLOWCONCURRENCYOPTS']._serialized_start=695
+ _globals['_WORKFLOWCONCURRENCYOPTS']._serialized_end=903
+ _globals['_CREATEWORKFLOWJOBOPTS']._serialized_start=905
+ _globals['_CREATEWORKFLOWJOBOPTS']._serialized_end=1009
+ _globals['_DESIREDWORKERLABELS']._serialized_start=1012
+ _globals['_DESIREDWORKERLABELS']._serialized_end=1237
+ _globals['_CREATEWORKFLOWSTEPOPTS']._serialized_start=1240
+ _globals['_CREATEWORKFLOWSTEPOPTS']._serialized_end=1677
+ _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_start=1561
+ _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_end=1634
+ _globals['_CREATESTEPRATELIMIT']._serialized_start=1680
+ _globals['_CREATESTEPRATELIMIT']._serialized_end=1930
+ _globals['_LISTWORKFLOWSREQUEST']._serialized_start=1932
+ _globals['_LISTWORKFLOWSREQUEST']._serialized_end=1954
+ _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_start=1957
+ _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_end=2290
+ _globals['_SCHEDULEDWORKFLOW']._serialized_start=2292
+ _globals['_SCHEDULEDWORKFLOW']._serialized_end=2371
+ _globals['_WORKFLOWVERSION']._serialized_start=2374
+ _globals['_WORKFLOWVERSION']._serialized_end=2601
+ _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_start=2603
+ _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_end=2666
+ _globals['_WORKFLOWTRIGGERCRONREF']._serialized_start=2668
+ _globals['_WORKFLOWTRIGGERCRONREF']._serialized_end=2725
+ _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_start=2727
+ _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_end=2799
+ _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_start=2801
+ _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_end=2856
+ _globals['_TRIGGERWORKFLOWREQUEST']._serialized_start=2859
+ _globals['_TRIGGERWORKFLOWREQUEST']._serialized_end=3234
+ _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_start=3236
+ _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_end=3286
+ _globals['_PUTRATELIMITREQUEST']._serialized_start=3288
+ _globals['_PUTRATELIMITREQUEST']._serialized_end=3375
+ _globals['_PUTRATELIMITRESPONSE']._serialized_start=3377
+ _globals['_PUTRATELIMITRESPONSE']._serialized_end=3399
+ _globals['_WORKFLOWSERVICE']._serialized_start=3852
+ _globals['_WORKFLOWSERVICE']._serialized_end=4200
+# @@protoc_insertion_point(module_scope)
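As with the other modules, the serialized descriptor above materializes the message classes at import time via the builder. A rough sketch of constructing one of them follows; the field values are placeholders, and the typed constructor signatures appear in the .pyi file below.

from hatchet_sdk.contracts import workflows_pb2

# TriggerWorkflowRequest carries the workflow name plus an input payload string.
req = workflows_pb2.TriggerWorkflowRequest(
    name="my-workflow",       # placeholder workflow name
    input='{"foo": "bar"}',
)

# Top-level enum values are exposed as module-level constants, e.g.:
strategy = workflows_pb2.GROUP_ROUND_ROBIN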
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/workflows_pb2.pyi b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/workflows_pb2.pyi
new file mode 100644
index 00000000..b406fcc4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/workflows_pb2.pyi
@@ -0,0 +1,312 @@
+from google.protobuf import timestamp_pb2 as _timestamp_pb2
+from google.protobuf.internal import containers as _containers
+from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union
+
+DESCRIPTOR: _descriptor.FileDescriptor
+
+class StickyStrategy(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+ __slots__ = ()
+ SOFT: _ClassVar[StickyStrategy]
+ HARD: _ClassVar[StickyStrategy]
+
+class WorkflowKind(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+ __slots__ = ()
+ FUNCTION: _ClassVar[WorkflowKind]
+ DURABLE: _ClassVar[WorkflowKind]
+ DAG: _ClassVar[WorkflowKind]
+
+class ConcurrencyLimitStrategy(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+ __slots__ = ()
+ CANCEL_IN_PROGRESS: _ClassVar[ConcurrencyLimitStrategy]
+ DROP_NEWEST: _ClassVar[ConcurrencyLimitStrategy]
+ QUEUE_NEWEST: _ClassVar[ConcurrencyLimitStrategy]
+ GROUP_ROUND_ROBIN: _ClassVar[ConcurrencyLimitStrategy]
+ CANCEL_NEWEST: _ClassVar[ConcurrencyLimitStrategy]
+
+class WorkerLabelComparator(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+ __slots__ = ()
+ EQUAL: _ClassVar[WorkerLabelComparator]
+ NOT_EQUAL: _ClassVar[WorkerLabelComparator]
+ GREATER_THAN: _ClassVar[WorkerLabelComparator]
+ GREATER_THAN_OR_EQUAL: _ClassVar[WorkerLabelComparator]
+ LESS_THAN: _ClassVar[WorkerLabelComparator]
+ LESS_THAN_OR_EQUAL: _ClassVar[WorkerLabelComparator]
+
+class RateLimitDuration(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
+ __slots__ = ()
+ SECOND: _ClassVar[RateLimitDuration]
+ MINUTE: _ClassVar[RateLimitDuration]
+ HOUR: _ClassVar[RateLimitDuration]
+ DAY: _ClassVar[RateLimitDuration]
+ WEEK: _ClassVar[RateLimitDuration]
+ MONTH: _ClassVar[RateLimitDuration]
+ YEAR: _ClassVar[RateLimitDuration]
+SOFT: StickyStrategy
+HARD: StickyStrategy
+FUNCTION: WorkflowKind
+DURABLE: WorkflowKind
+DAG: WorkflowKind
+CANCEL_IN_PROGRESS: ConcurrencyLimitStrategy
+DROP_NEWEST: ConcurrencyLimitStrategy
+QUEUE_NEWEST: ConcurrencyLimitStrategy
+GROUP_ROUND_ROBIN: ConcurrencyLimitStrategy
+CANCEL_NEWEST: ConcurrencyLimitStrategy
+EQUAL: WorkerLabelComparator
+NOT_EQUAL: WorkerLabelComparator
+GREATER_THAN: WorkerLabelComparator
+GREATER_THAN_OR_EQUAL: WorkerLabelComparator
+LESS_THAN: WorkerLabelComparator
+LESS_THAN_OR_EQUAL: WorkerLabelComparator
+SECOND: RateLimitDuration
+MINUTE: RateLimitDuration
+HOUR: RateLimitDuration
+DAY: RateLimitDuration
+WEEK: RateLimitDuration
+MONTH: RateLimitDuration
+YEAR: RateLimitDuration
+
+class PutWorkflowRequest(_message.Message):
+ __slots__ = ("opts",)
+ OPTS_FIELD_NUMBER: _ClassVar[int]
+ opts: CreateWorkflowVersionOpts
+ def __init__(self, opts: _Optional[_Union[CreateWorkflowVersionOpts, _Mapping]] = ...) -> None: ...
+
+class CreateWorkflowVersionOpts(_message.Message):
+ __slots__ = ("name", "description", "version", "event_triggers", "cron_triggers", "scheduled_triggers", "jobs", "concurrency", "schedule_timeout", "cron_input", "on_failure_job", "sticky", "kind", "default_priority")
+ NAME_FIELD_NUMBER: _ClassVar[int]
+ DESCRIPTION_FIELD_NUMBER: _ClassVar[int]
+ VERSION_FIELD_NUMBER: _ClassVar[int]
+ EVENT_TRIGGERS_FIELD_NUMBER: _ClassVar[int]
+ CRON_TRIGGERS_FIELD_NUMBER: _ClassVar[int]
+ SCHEDULED_TRIGGERS_FIELD_NUMBER: _ClassVar[int]
+ JOBS_FIELD_NUMBER: _ClassVar[int]
+ CONCURRENCY_FIELD_NUMBER: _ClassVar[int]
+ SCHEDULE_TIMEOUT_FIELD_NUMBER: _ClassVar[int]
+ CRON_INPUT_FIELD_NUMBER: _ClassVar[int]
+ ON_FAILURE_JOB_FIELD_NUMBER: _ClassVar[int]
+ STICKY_FIELD_NUMBER: _ClassVar[int]
+ KIND_FIELD_NUMBER: _ClassVar[int]
+ DEFAULT_PRIORITY_FIELD_NUMBER: _ClassVar[int]
+ name: str
+ description: str
+ version: str
+ event_triggers: _containers.RepeatedScalarFieldContainer[str]
+ cron_triggers: _containers.RepeatedScalarFieldContainer[str]
+ scheduled_triggers: _containers.RepeatedCompositeFieldContainer[_timestamp_pb2.Timestamp]
+ jobs: _containers.RepeatedCompositeFieldContainer[CreateWorkflowJobOpts]
+ concurrency: WorkflowConcurrencyOpts
+ schedule_timeout: str
+ cron_input: str
+ on_failure_job: CreateWorkflowJobOpts
+ sticky: StickyStrategy
+ kind: WorkflowKind
+ default_priority: int
+ def __init__(self, name: _Optional[str] = ..., description: _Optional[str] = ..., version: _Optional[str] = ..., event_triggers: _Optional[_Iterable[str]] = ..., cron_triggers: _Optional[_Iterable[str]] = ..., scheduled_triggers: _Optional[_Iterable[_Union[_timestamp_pb2.Timestamp, _Mapping]]] = ..., jobs: _Optional[_Iterable[_Union[CreateWorkflowJobOpts, _Mapping]]] = ..., concurrency: _Optional[_Union[WorkflowConcurrencyOpts, _Mapping]] = ..., schedule_timeout: _Optional[str] = ..., cron_input: _Optional[str] = ..., on_failure_job: _Optional[_Union[CreateWorkflowJobOpts, _Mapping]] = ..., sticky: _Optional[_Union[StickyStrategy, str]] = ..., kind: _Optional[_Union[WorkflowKind, str]] = ..., default_priority: _Optional[int] = ...) -> None: ...
+
+class WorkflowConcurrencyOpts(_message.Message):
+ __slots__ = ("action", "max_runs", "limit_strategy", "expression")
+ ACTION_FIELD_NUMBER: _ClassVar[int]
+ MAX_RUNS_FIELD_NUMBER: _ClassVar[int]
+ LIMIT_STRATEGY_FIELD_NUMBER: _ClassVar[int]
+ EXPRESSION_FIELD_NUMBER: _ClassVar[int]
+ action: str
+ max_runs: int
+ limit_strategy: ConcurrencyLimitStrategy
+ expression: str
+ def __init__(self, action: _Optional[str] = ..., max_runs: _Optional[int] = ..., limit_strategy: _Optional[_Union[ConcurrencyLimitStrategy, str]] = ..., expression: _Optional[str] = ...) -> None: ...
+
+class CreateWorkflowJobOpts(_message.Message):
+ __slots__ = ("name", "description", "steps")
+ NAME_FIELD_NUMBER: _ClassVar[int]
+ DESCRIPTION_FIELD_NUMBER: _ClassVar[int]
+ STEPS_FIELD_NUMBER: _ClassVar[int]
+ name: str
+ description: str
+ steps: _containers.RepeatedCompositeFieldContainer[CreateWorkflowStepOpts]
+ def __init__(self, name: _Optional[str] = ..., description: _Optional[str] = ..., steps: _Optional[_Iterable[_Union[CreateWorkflowStepOpts, _Mapping]]] = ...) -> None: ...
+
+class DesiredWorkerLabels(_message.Message):
+ __slots__ = ("strValue", "intValue", "required", "comparator", "weight")
+ STRVALUE_FIELD_NUMBER: _ClassVar[int]
+ INTVALUE_FIELD_NUMBER: _ClassVar[int]
+ REQUIRED_FIELD_NUMBER: _ClassVar[int]
+ COMPARATOR_FIELD_NUMBER: _ClassVar[int]
+ WEIGHT_FIELD_NUMBER: _ClassVar[int]
+ strValue: str
+ intValue: int
+ required: bool
+ comparator: WorkerLabelComparator
+ weight: int
+ def __init__(self, strValue: _Optional[str] = ..., intValue: _Optional[int] = ..., required: bool = ..., comparator: _Optional[_Union[WorkerLabelComparator, str]] = ..., weight: _Optional[int] = ...) -> None: ...
+
+class CreateWorkflowStepOpts(_message.Message):
+ __slots__ = ("readable_id", "action", "timeout", "inputs", "parents", "user_data", "retries", "rate_limits", "worker_labels", "backoff_factor", "backoff_max_seconds")
+ class WorkerLabelsEntry(_message.Message):
+ __slots__ = ("key", "value")
+ KEY_FIELD_NUMBER: _ClassVar[int]
+ VALUE_FIELD_NUMBER: _ClassVar[int]
+ key: str
+ value: DesiredWorkerLabels
+ def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[DesiredWorkerLabels, _Mapping]] = ...) -> None: ...
+ READABLE_ID_FIELD_NUMBER: _ClassVar[int]
+ ACTION_FIELD_NUMBER: _ClassVar[int]
+ TIMEOUT_FIELD_NUMBER: _ClassVar[int]
+ INPUTS_FIELD_NUMBER: _ClassVar[int]
+ PARENTS_FIELD_NUMBER: _ClassVar[int]
+ USER_DATA_FIELD_NUMBER: _ClassVar[int]
+ RETRIES_FIELD_NUMBER: _ClassVar[int]
+ RATE_LIMITS_FIELD_NUMBER: _ClassVar[int]
+ WORKER_LABELS_FIELD_NUMBER: _ClassVar[int]
+ BACKOFF_FACTOR_FIELD_NUMBER: _ClassVar[int]
+ BACKOFF_MAX_SECONDS_FIELD_NUMBER: _ClassVar[int]
+ readable_id: str
+ action: str
+ timeout: str
+ inputs: str
+ parents: _containers.RepeatedScalarFieldContainer[str]
+ user_data: str
+ retries: int
+ rate_limits: _containers.RepeatedCompositeFieldContainer[CreateStepRateLimit]
+ worker_labels: _containers.MessageMap[str, DesiredWorkerLabels]
+ backoff_factor: float
+ backoff_max_seconds: int
+ def __init__(self, readable_id: _Optional[str] = ..., action: _Optional[str] = ..., timeout: _Optional[str] = ..., inputs: _Optional[str] = ..., parents: _Optional[_Iterable[str]] = ..., user_data: _Optional[str] = ..., retries: _Optional[int] = ..., rate_limits: _Optional[_Iterable[_Union[CreateStepRateLimit, _Mapping]]] = ..., worker_labels: _Optional[_Mapping[str, DesiredWorkerLabels]] = ..., backoff_factor: _Optional[float] = ..., backoff_max_seconds: _Optional[int] = ...) -> None: ...
+
+class CreateStepRateLimit(_message.Message):
+ __slots__ = ("key", "units", "key_expr", "units_expr", "limit_values_expr", "duration")
+ KEY_FIELD_NUMBER: _ClassVar[int]
+ UNITS_FIELD_NUMBER: _ClassVar[int]
+ KEY_EXPR_FIELD_NUMBER: _ClassVar[int]
+ UNITS_EXPR_FIELD_NUMBER: _ClassVar[int]
+ LIMIT_VALUES_EXPR_FIELD_NUMBER: _ClassVar[int]
+ DURATION_FIELD_NUMBER: _ClassVar[int]
+ key: str
+ units: int
+ key_expr: str
+ units_expr: str
+ limit_values_expr: str
+ duration: RateLimitDuration
+ def __init__(self, key: _Optional[str] = ..., units: _Optional[int] = ..., key_expr: _Optional[str] = ..., units_expr: _Optional[str] = ..., limit_values_expr: _Optional[str] = ..., duration: _Optional[_Union[RateLimitDuration, str]] = ...) -> None: ...
+
+class ListWorkflowsRequest(_message.Message):
+ __slots__ = ()
+ def __init__(self) -> None: ...
+
+class ScheduleWorkflowRequest(_message.Message):
+ __slots__ = ("name", "schedules", "input", "parent_id", "parent_step_run_id", "child_index", "child_key", "additional_metadata")
+ NAME_FIELD_NUMBER: _ClassVar[int]
+ SCHEDULES_FIELD_NUMBER: _ClassVar[int]
+ INPUT_FIELD_NUMBER: _ClassVar[int]
+ PARENT_ID_FIELD_NUMBER: _ClassVar[int]
+ PARENT_STEP_RUN_ID_FIELD_NUMBER: _ClassVar[int]
+ CHILD_INDEX_FIELD_NUMBER: _ClassVar[int]
+ CHILD_KEY_FIELD_NUMBER: _ClassVar[int]
+ ADDITIONAL_METADATA_FIELD_NUMBER: _ClassVar[int]
+ name: str
+ schedules: _containers.RepeatedCompositeFieldContainer[_timestamp_pb2.Timestamp]
+ input: str
+ parent_id: str
+ parent_step_run_id: str
+ child_index: int
+ child_key: str
+ additional_metadata: str
+ def __init__(self, name: _Optional[str] = ..., schedules: _Optional[_Iterable[_Union[_timestamp_pb2.Timestamp, _Mapping]]] = ..., input: _Optional[str] = ..., parent_id: _Optional[str] = ..., parent_step_run_id: _Optional[str] = ..., child_index: _Optional[int] = ..., child_key: _Optional[str] = ..., additional_metadata: _Optional[str] = ...) -> None: ...
+
+class ScheduledWorkflow(_message.Message):
+ __slots__ = ("id", "trigger_at")
+ ID_FIELD_NUMBER: _ClassVar[int]
+ TRIGGER_AT_FIELD_NUMBER: _ClassVar[int]
+ id: str
+ trigger_at: _timestamp_pb2.Timestamp
+ def __init__(self, id: _Optional[str] = ..., trigger_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ...
+
+class WorkflowVersion(_message.Message):
+ __slots__ = ("id", "created_at", "updated_at", "version", "order", "workflow_id", "scheduled_workflows")
+ ID_FIELD_NUMBER: _ClassVar[int]
+ CREATED_AT_FIELD_NUMBER: _ClassVar[int]
+ UPDATED_AT_FIELD_NUMBER: _ClassVar[int]
+ VERSION_FIELD_NUMBER: _ClassVar[int]
+ ORDER_FIELD_NUMBER: _ClassVar[int]
+ WORKFLOW_ID_FIELD_NUMBER: _ClassVar[int]
+ SCHEDULED_WORKFLOWS_FIELD_NUMBER: _ClassVar[int]
+ id: str
+ created_at: _timestamp_pb2.Timestamp
+ updated_at: _timestamp_pb2.Timestamp
+ version: str
+ order: int
+ workflow_id: str
+ scheduled_workflows: _containers.RepeatedCompositeFieldContainer[ScheduledWorkflow]
+ def __init__(self, id: _Optional[str] = ..., created_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., updated_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., version: _Optional[str] = ..., order: _Optional[int] = ..., workflow_id: _Optional[str] = ..., scheduled_workflows: _Optional[_Iterable[_Union[ScheduledWorkflow, _Mapping]]] = ...) -> None: ...
+
+class WorkflowTriggerEventRef(_message.Message):
+ __slots__ = ("parent_id", "event_key")
+ PARENT_ID_FIELD_NUMBER: _ClassVar[int]
+ EVENT_KEY_FIELD_NUMBER: _ClassVar[int]
+ parent_id: str
+ event_key: str
+ def __init__(self, parent_id: _Optional[str] = ..., event_key: _Optional[str] = ...) -> None: ...
+
+class WorkflowTriggerCronRef(_message.Message):
+ __slots__ = ("parent_id", "cron")
+ PARENT_ID_FIELD_NUMBER: _ClassVar[int]
+ CRON_FIELD_NUMBER: _ClassVar[int]
+ parent_id: str
+ cron: str
+ def __init__(self, parent_id: _Optional[str] = ..., cron: _Optional[str] = ...) -> None: ...
+
+class BulkTriggerWorkflowRequest(_message.Message):
+ __slots__ = ("workflows",)
+ WORKFLOWS_FIELD_NUMBER: _ClassVar[int]
+ workflows: _containers.RepeatedCompositeFieldContainer[TriggerWorkflowRequest]
+ def __init__(self, workflows: _Optional[_Iterable[_Union[TriggerWorkflowRequest, _Mapping]]] = ...) -> None: ...
+
+class BulkTriggerWorkflowResponse(_message.Message):
+ __slots__ = ("workflow_run_ids",)
+ WORKFLOW_RUN_IDS_FIELD_NUMBER: _ClassVar[int]
+ workflow_run_ids: _containers.RepeatedScalarFieldContainer[str]
+ def __init__(self, workflow_run_ids: _Optional[_Iterable[str]] = ...) -> None: ...
+
+class TriggerWorkflowRequest(_message.Message):
+ __slots__ = ("name", "input", "parent_id", "parent_step_run_id", "child_index", "child_key", "additional_metadata", "desired_worker_id", "priority")
+ NAME_FIELD_NUMBER: _ClassVar[int]
+ INPUT_FIELD_NUMBER: _ClassVar[int]
+ PARENT_ID_FIELD_NUMBER: _ClassVar[int]
+ PARENT_STEP_RUN_ID_FIELD_NUMBER: _ClassVar[int]
+ CHILD_INDEX_FIELD_NUMBER: _ClassVar[int]
+ CHILD_KEY_FIELD_NUMBER: _ClassVar[int]
+ ADDITIONAL_METADATA_FIELD_NUMBER: _ClassVar[int]
+ DESIRED_WORKER_ID_FIELD_NUMBER: _ClassVar[int]
+ PRIORITY_FIELD_NUMBER: _ClassVar[int]
+ name: str
+ input: str
+ parent_id: str
+ parent_step_run_id: str
+ child_index: int
+ child_key: str
+ additional_metadata: str
+ desired_worker_id: str
+ priority: int
+ def __init__(self, name: _Optional[str] = ..., input: _Optional[str] = ..., parent_id: _Optional[str] = ..., parent_step_run_id: _Optional[str] = ..., child_index: _Optional[int] = ..., child_key: _Optional[str] = ..., additional_metadata: _Optional[str] = ..., desired_worker_id: _Optional[str] = ..., priority: _Optional[int] = ...) -> None: ...
+
+class TriggerWorkflowResponse(_message.Message):
+ __slots__ = ("workflow_run_id",)
+ WORKFLOW_RUN_ID_FIELD_NUMBER: _ClassVar[int]
+ workflow_run_id: str
+ def __init__(self, workflow_run_id: _Optional[str] = ...) -> None: ...
+
+class PutRateLimitRequest(_message.Message):
+ __slots__ = ("key", "limit", "duration")
+ KEY_FIELD_NUMBER: _ClassVar[int]
+ LIMIT_FIELD_NUMBER: _ClassVar[int]
+ DURATION_FIELD_NUMBER: _ClassVar[int]
+ key: str
+ limit: int
+ duration: RateLimitDuration
+ def __init__(self, key: _Optional[str] = ..., limit: _Optional[int] = ..., duration: _Optional[_Union[RateLimitDuration, str]] = ...) -> None: ...
+
+class PutRateLimitResponse(_message.Message):
+ __slots__ = ()
+ def __init__(self) -> None: ...
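
The stub above only declares the generated message and enum types, so a short orientation sketch may help. It is illustrative only; the workflow name is hypothetical, and treating `input` and `additional_metadata` as JSON-encoded strings is an assumption based on how the SDK uses these fields elsewhere, not something the stub states.

import json

from hatchet_sdk.contracts import workflows_pb2

# A rate limit of 10 units per minute under a user-chosen key.
rate_limit = workflows_pb2.PutRateLimitRequest(
    key="external-api",
    limit=10,
    duration=workflows_pb2.RateLimitDuration.MINUTE,
)

# A trigger request; the payload is passed as a JSON string.
trigger = workflows_pb2.TriggerWorkflowRequest(
    name="example-workflow",  # hypothetical workflow name
    input=json.dumps({"user_id": 123}),
    additional_metadata=json.dumps({"source": "docs"}),
)

print(rate_limit.duration, trigger.name)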
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/workflows_pb2_grpc.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/workflows_pb2_grpc.py
new file mode 100644
index 00000000..383170e2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/contracts/workflows_pb2_grpc.py
@@ -0,0 +1,277 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+import warnings
+
+from . import workflows_pb2 as workflows__pb2
+
+GRPC_GENERATED_VERSION = '1.64.1'
+GRPC_VERSION = grpc.__version__
+EXPECTED_ERROR_RELEASE = '1.65.0'
+SCHEDULED_RELEASE_DATE = 'June 25, 2024'
+_version_not_supported = False
+
+try:
+ from grpc._utilities import first_version_is_lower
+ _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
+except ImportError:
+ _version_not_supported = True
+
+if _version_not_supported:
+ warnings.warn(
+ f'The grpc package installed is at version {GRPC_VERSION},'
+ + f' but the generated code in workflows_pb2_grpc.py depends on'
+ + f' grpcio>={GRPC_GENERATED_VERSION}.'
+ + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
+ + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
+ + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},'
+ + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.',
+ RuntimeWarning
+ )
+
+
+class WorkflowServiceStub(object):
+ """WorkflowService represents a set of RPCs for managing workflows.
+ """
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.PutWorkflow = channel.unary_unary(
+ '/WorkflowService/PutWorkflow',
+ request_serializer=workflows__pb2.PutWorkflowRequest.SerializeToString,
+ response_deserializer=workflows__pb2.WorkflowVersion.FromString,
+ _registered_method=True)
+ self.ScheduleWorkflow = channel.unary_unary(
+ '/WorkflowService/ScheduleWorkflow',
+ request_serializer=workflows__pb2.ScheduleWorkflowRequest.SerializeToString,
+ response_deserializer=workflows__pb2.WorkflowVersion.FromString,
+ _registered_method=True)
+ self.TriggerWorkflow = channel.unary_unary(
+ '/WorkflowService/TriggerWorkflow',
+ request_serializer=workflows__pb2.TriggerWorkflowRequest.SerializeToString,
+ response_deserializer=workflows__pb2.TriggerWorkflowResponse.FromString,
+ _registered_method=True)
+ self.BulkTriggerWorkflow = channel.unary_unary(
+ '/WorkflowService/BulkTriggerWorkflow',
+ request_serializer=workflows__pb2.BulkTriggerWorkflowRequest.SerializeToString,
+ response_deserializer=workflows__pb2.BulkTriggerWorkflowResponse.FromString,
+ _registered_method=True)
+ self.PutRateLimit = channel.unary_unary(
+ '/WorkflowService/PutRateLimit',
+ request_serializer=workflows__pb2.PutRateLimitRequest.SerializeToString,
+ response_deserializer=workflows__pb2.PutRateLimitResponse.FromString,
+ _registered_method=True)
+
+
+class WorkflowServiceServicer(object):
+ """WorkflowService represents a set of RPCs for managing workflows.
+ """
+
+ def PutWorkflow(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def ScheduleWorkflow(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def TriggerWorkflow(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def BulkTriggerWorkflow(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def PutRateLimit(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+
+def add_WorkflowServiceServicer_to_server(servicer, server):
+ rpc_method_handlers = {
+ 'PutWorkflow': grpc.unary_unary_rpc_method_handler(
+ servicer.PutWorkflow,
+ request_deserializer=workflows__pb2.PutWorkflowRequest.FromString,
+ response_serializer=workflows__pb2.WorkflowVersion.SerializeToString,
+ ),
+ 'ScheduleWorkflow': grpc.unary_unary_rpc_method_handler(
+ servicer.ScheduleWorkflow,
+ request_deserializer=workflows__pb2.ScheduleWorkflowRequest.FromString,
+ response_serializer=workflows__pb2.WorkflowVersion.SerializeToString,
+ ),
+ 'TriggerWorkflow': grpc.unary_unary_rpc_method_handler(
+ servicer.TriggerWorkflow,
+ request_deserializer=workflows__pb2.TriggerWorkflowRequest.FromString,
+ response_serializer=workflows__pb2.TriggerWorkflowResponse.SerializeToString,
+ ),
+ 'BulkTriggerWorkflow': grpc.unary_unary_rpc_method_handler(
+ servicer.BulkTriggerWorkflow,
+ request_deserializer=workflows__pb2.BulkTriggerWorkflowRequest.FromString,
+ response_serializer=workflows__pb2.BulkTriggerWorkflowResponse.SerializeToString,
+ ),
+ 'PutRateLimit': grpc.unary_unary_rpc_method_handler(
+ servicer.PutRateLimit,
+ request_deserializer=workflows__pb2.PutRateLimitRequest.FromString,
+ response_serializer=workflows__pb2.PutRateLimitResponse.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'WorkflowService', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+ server.add_registered_method_handlers('WorkflowService', rpc_method_handlers)
+
+
+ # This class is part of an EXPERIMENTAL API.
+class WorkflowService(object):
+ """WorkflowService represents a set of RPCs for managing workflows.
+ """
+
+ @staticmethod
+ def PutWorkflow(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/WorkflowService/PutWorkflow',
+ workflows__pb2.PutWorkflowRequest.SerializeToString,
+ workflows__pb2.WorkflowVersion.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def ScheduleWorkflow(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/WorkflowService/ScheduleWorkflow',
+ workflows__pb2.ScheduleWorkflowRequest.SerializeToString,
+ workflows__pb2.WorkflowVersion.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def TriggerWorkflow(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/WorkflowService/TriggerWorkflow',
+ workflows__pb2.TriggerWorkflowRequest.SerializeToString,
+ workflows__pb2.TriggerWorkflowResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def BulkTriggerWorkflow(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/WorkflowService/BulkTriggerWorkflow',
+ workflows__pb2.BulkTriggerWorkflowRequest.SerializeToString,
+ workflows__pb2.BulkTriggerWorkflowResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
+
+ @staticmethod
+ def PutRateLimit(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ '/WorkflowService/PutRateLimit',
+ workflows__pb2.PutRateLimitRequest.SerializeToString,
+ workflows__pb2.PutRateLimitResponse.FromString,
+ options,
+ channel_credentials,
+ insecure,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ _registered_method=True)
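
For orientation, the generated stub can be called directly against a Hatchet engine, although in practice the SDK's higher-level admin client wraps these RPCs with TLS and auth handling. A minimal sketch, assuming a locally reachable engine on a plaintext port and a placeholder bearer token:

import grpc

from hatchet_sdk.contracts import workflows_pb2, workflows_pb2_grpc

channel = grpc.insecure_channel("localhost:7070")  # hypothetical engine address
stub = workflows_pb2_grpc.WorkflowServiceStub(channel)

response = stub.TriggerWorkflow(
    workflows_pb2.TriggerWorkflowRequest(name="example-workflow", input="{}"),
    metadata=[("authorization", "bearer <token>")],  # placeholder token
)
print(response.workflow_run_id)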
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/features/cron.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/features/cron.py
new file mode 100644
index 00000000..c54e5b3b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/features/cron.py
@@ -0,0 +1,286 @@
+from typing import Union
+
+from pydantic import BaseModel, field_validator
+
+from hatchet_sdk.client import Client
+from hatchet_sdk.clients.rest.models.cron_workflows import CronWorkflows
+from hatchet_sdk.clients.rest.models.cron_workflows_list import CronWorkflowsList
+from hatchet_sdk.clients.rest.models.cron_workflows_order_by_field import (
+ CronWorkflowsOrderByField,
+)
+from hatchet_sdk.clients.rest.models.workflow_run_order_by_direction import (
+ WorkflowRunOrderByDirection,
+)
+
+
+class CreateCronTriggerInput(BaseModel):
+ """
+ Schema for creating a workflow run triggered by a cron.
+
+ Attributes:
+ expression (str): The cron expression defining the schedule.
+ input (dict): The input data for the cron workflow.
+ additional_metadata (dict[str, str]): Additional metadata associated with the cron trigger (e.g. {"key1": "value1", "key2": "value2"}).
+ """
+
+ expression: str
+ input: dict = {}
+ additional_metadata: dict[str, str] = {}
+
+ @field_validator("expression")
+ def validate_cron_expression(cls, v):
+ """
+ Validates the cron expression to ensure it adheres to the expected format.
+
+ Args:
+ v (str): The cron expression to validate.
+
+ Raises:
+ ValueError: If the expression is invalid.
+
+ Returns:
+ str: The validated cron expression.
+ """
+ if not v:
+ raise ValueError("Cron expression is required")
+
+ parts = v.split()
+ if len(parts) != 5:
+ raise ValueError(
+ "Cron expression must have 5 parts: minute hour day month weekday"
+ )
+
+ for part in parts:
+ if not (
+ part == "*"
+ or part.replace("*/", "").replace("-", "").replace(",", "").isdigit()
+ ):
+ raise ValueError(f"Invalid cron expression part: {part}")
+
+ return v
+
+
+class CronClient:
+ """
+ Client for managing workflow cron triggers synchronously.
+
+ Attributes:
+ _client (Client): The underlying client used to interact with the REST API.
+ aio (CronClientAsync): Asynchronous counterpart of CronClient.
+ """
+
+ _client: Client
+
+ def __init__(self, _client: Client):
+ """
+ Initializes the CronClient with a given Client instance.
+
+ Args:
+ _client (Client): The client instance to be used for REST interactions.
+ """
+ self._client = _client
+ self.aio = CronClientAsync(_client)
+
+ def create(
+ self,
+ workflow_name: str,
+ cron_name: str,
+ expression: str,
+ input: dict,
+ additional_metadata: dict[str, str],
+ ) -> CronWorkflows:
+ """
+ Creates a new workflow cron trigger.
+
+ Args:
+ workflow_name (str): The name of the workflow to trigger.
+ cron_name (str): The name of the cron trigger.
+ expression (str): The cron expression defining the schedule.
+ input (dict): The input data for the cron workflow.
+ additional_metadata (dict[str, str]): Additional metadata associated with the cron trigger (e.g. {"key1": "value1", "key2": "value2"}).
+
+ Returns:
+ CronWorkflows: The created cron workflow instance.
+ """
+ validated_input = CreateCronTriggerInput(
+ expression=expression, input=input, additional_metadata=additional_metadata
+ )
+
+ return self._client.rest.cron_create(
+ workflow_name,
+ cron_name,
+ validated_input.expression,
+ validated_input.input,
+ validated_input.additional_metadata,
+ )
+
+ def delete(self, cron_trigger: Union[str, CronWorkflows]) -> None:
+ """
+ Deletes a workflow cron trigger.
+
+ Args:
+ cron_trigger (Union[str, CronWorkflows]): The cron trigger ID or CronWorkflows instance to delete.
+ """
+ id_ = cron_trigger
+ if isinstance(cron_trigger, CronWorkflows):
+ id_ = cron_trigger.metadata.id
+ self._client.rest.cron_delete(id_)
+
+ def list(
+ self,
+ offset: int | None = None,
+ limit: int | None = None,
+ workflow_id: str | None = None,
+ additional_metadata: list[str] | None = None,
+ order_by_field: CronWorkflowsOrderByField | None = None,
+ order_by_direction: WorkflowRunOrderByDirection | None = None,
+ ) -> CronWorkflowsList:
+ """
+ Retrieves a list of all workflow cron triggers matching the criteria.
+
+ Args:
+ offset (int | None): The offset to start the list from.
+ limit (int | None): The maximum number of items to return.
+ workflow_id (str | None): The ID of the workflow to filter by.
+ additional_metadata (list[str] | None): Filter by additional metadata keys (e.g. ["key1:value1", "key2:value2"]).
+ order_by_field (CronWorkflowsOrderByField | None): The field to order the list by.
+ order_by_direction (WorkflowRunOrderByDirection | None): The direction to order the list by.
+
+ Returns:
+ CronWorkflowsList: A list of cron workflows.
+ """
+ return self._client.rest.cron_list(
+ offset=offset,
+ limit=limit,
+ workflow_id=workflow_id,
+ additional_metadata=additional_metadata,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ )
+
+ def get(self, cron_trigger: Union[str, CronWorkflows]) -> CronWorkflows:
+ """
+ Retrieves a specific workflow cron trigger by ID.
+
+ Args:
+ cron_trigger (Union[str, CronWorkflows]): The cron trigger ID or CronWorkflows instance to retrieve.
+
+ Returns:
+ CronWorkflows: The requested cron workflow instance.
+ """
+ id_ = cron_trigger
+ if isinstance(cron_trigger, CronWorkflows):
+ id_ = cron_trigger.metadata.id
+ return self._client.rest.cron_get(id_)
+
+
+class CronClientAsync:
+ """
+ Asynchronous client for managing workflow cron triggers.
+
+ Attributes:
+ _client (Client): The underlying client used to interact with the REST API asynchronously.
+ """
+
+ _client: Client
+
+ def __init__(self, _client: Client):
+ """
+ Initializes the CronClientAsync with a given Client instance.
+
+ Args:
+ _client (Client): The client instance to be used for asynchronous REST interactions.
+ """
+ self._client = _client
+
+ async def create(
+ self,
+ workflow_name: str,
+ cron_name: str,
+ expression: str,
+ input: dict,
+ additional_metadata: dict[str, str],
+ ) -> CronWorkflows:
+ """
+ Asynchronously creates a new workflow cron trigger.
+
+ Args:
+ workflow_name (str): The name of the workflow to trigger.
+ cron_name (str): The name of the cron trigger.
+ expression (str): The cron expression defining the schedule.
+ input (dict): The input data for the cron workflow.
+ additional_metadata (dict[str, str]): Additional metadata associated with the cron trigger (e.g. {"key1": "value1", "key2": "value2"}).
+
+ Returns:
+ CronWorkflows: The created cron workflow instance.
+ """
+ validated_input = CreateCronTriggerInput(
+ expression=expression, input=input, additional_metadata=additional_metadata
+ )
+
+ return await self._client.rest.aio.cron_create(
+ workflow_name=workflow_name,
+ cron_name=cron_name,
+ expression=validated_input.expression,
+ input=validated_input.input,
+ additional_metadata=validated_input.additional_metadata,
+ )
+
+ async def delete(self, cron_trigger: Union[str, CronWorkflows]) -> None:
+ """
+ Asynchronously deletes a workflow cron trigger.
+
+ Args:
+ cron_trigger (Union[str, CronWorkflows]): The cron trigger ID or CronWorkflows instance to delete.
+ """
+ id_ = cron_trigger
+ if isinstance(cron_trigger, CronWorkflows):
+ id_ = cron_trigger.metadata.id
+ await self._client.rest.aio.cron_delete(id_)
+
+ async def list(
+ self,
+ offset: int | None = None,
+ limit: int | None = None,
+ workflow_id: str | None = None,
+ additional_metadata: list[str] | None = None,
+ order_by_field: CronWorkflowsOrderByField | None = None,
+ order_by_direction: WorkflowRunOrderByDirection | None = None,
+ ) -> CronWorkflowsList:
+ """
+ Asynchronously retrieves a list of all workflow cron triggers matching the criteria.
+
+ Args:
+ offset (int | None): The offset to start the list from.
+ limit (int | None): The maximum number of items to return.
+ workflow_id (str | None): The ID of the workflow to filter by.
+ additional_metadata (list[str] | None): Filter by additional metadata keys (e.g. ["key1:value1", "key2:value2"]).
+ order_by_field (CronWorkflowsOrderByField | None): The field to order the list by.
+ order_by_direction (WorkflowRunOrderByDirection | None): The direction to order the list by.
+
+ Returns:
+ CronWorkflowsList: A list of cron workflows.
+ """
+ return await self._client.rest.aio.cron_list(
+ offset=offset,
+ limit=limit,
+ workflow_id=workflow_id,
+ additional_metadata=additional_metadata,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ )
+
+ async def get(self, cron_trigger: Union[str, CronWorkflows]) -> CronWorkflows:
+ """
+ Asynchronously retrieves a specific workflow cron trigger by ID.
+
+ Args:
+ cron_trigger (Union[str, CronWorkflows]): The cron trigger ID or CronWorkflows instance to retrieve.
+
+ Returns:
+ CronWorkflows: The requested cron workflow instance.
+ """
+ id_ = cron_trigger
+ if isinstance(cron_trigger, CronWorkflows):
+ id_ = cron_trigger.metadata.id
+ return await self._client.rest.aio.cron_get(id_)
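
A usage sketch for the cron client above (not part of the package): it assumes the Hatchet client is importable from the package root, that its token is available in the environment, and that a workflow named "example-workflow" is already registered with a worker; both names are hypothetical.

from hatchet_sdk import Hatchet

hatchet = Hatchet()  # reads HATCHET_CLIENT_TOKEN and related settings from the environment

cron = hatchet.cron.create(
    workflow_name="example-workflow",
    cron_name="nightly-report",
    expression="0 2 * * *",  # five-field expression, checked by the validator above
    input={"report": "daily"},
    additional_metadata={"team": "data"},
)

print(hatchet.cron.get(cron).metadata.id)
hatchet.cron.delete(cron)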
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/features/scheduled.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/features/scheduled.py
new file mode 100644
index 00000000..45af2609
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/features/scheduled.py
@@ -0,0 +1,248 @@
+import datetime
+from typing import Any, Coroutine, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+from hatchet_sdk.client import Client
+from hatchet_sdk.clients.rest.models.cron_workflows import CronWorkflows
+from hatchet_sdk.clients.rest.models.cron_workflows_order_by_field import (
+ CronWorkflowsOrderByField,
+)
+from hatchet_sdk.clients.rest.models.scheduled_workflows import ScheduledWorkflows
+from hatchet_sdk.clients.rest.models.scheduled_workflows_list import (
+ ScheduledWorkflowsList,
+)
+from hatchet_sdk.clients.rest.models.workflow_run_order_by_direction import (
+ WorkflowRunOrderByDirection,
+)
+
+
+class CreateScheduledTriggerInput(BaseModel):
+ """
+ Schema for creating a scheduled workflow run.
+
+ Attributes:
+ input (Dict[str, Any]): The input data for the scheduled workflow.
+ additional_metadata (Dict[str, str]): Additional metadata associated with the future run (e.g. {"key1": "value1", "key2": "value2"}).
+ trigger_at (Optional[datetime.datetime]): The datetime when the run should be triggered.
+ """
+
+ input: Dict[str, Any] = {}
+ additional_metadata: Dict[str, str] = {}
+ trigger_at: Optional[datetime.datetime] = None
+
+
+class ScheduledClient:
+ """
+ Client for managing scheduled workflows synchronously.
+
+ Attributes:
+ _client (Client): The underlying client used to interact with the REST API.
+ aio (ScheduledClientAsync): Asynchronous counterpart of ScheduledClient.
+ """
+
+ _client: Client
+
+ def __init__(self, _client: Client) -> None:
+ """
+ Initializes the ScheduledClient with a given Client instance.
+
+ Args:
+ _client (Client): The client instance to be used for REST interactions.
+ """
+ self._client = _client
+ self.aio: "ScheduledClientAsync" = ScheduledClientAsync(_client)
+
+ def create(
+ self,
+ workflow_name: str,
+ trigger_at: datetime.datetime,
+ input: Dict[str, Any],
+ additional_metadata: Dict[str, str],
+ ) -> ScheduledWorkflows:
+ """
+ Creates a new scheduled workflow run.
+
+ Args:
+ workflow_name (str): The name of the scheduled workflow.
+ trigger_at (datetime.datetime): The datetime when the run should be triggered.
+ input (Dict[str, Any]): The input data for the scheduled workflow.
+ additional_metadata (Dict[str, str]): Additional metadata associated with the future run as a key-value pair (e.g. {"key1": "value1", "key2": "value2"}).
+
+ Returns:
+ ScheduledWorkflows: The created scheduled workflow instance.
+ """
+
+ validated_input = CreateScheduledTriggerInput(
+ trigger_at=trigger_at, input=input, additional_metadata=additional_metadata
+ )
+
+ return self._client.rest.schedule_create(
+ workflow_name,
+ validated_input.trigger_at,
+ validated_input.input,
+ validated_input.additional_metadata,
+ )
+
+ def delete(self, scheduled: Union[str, ScheduledWorkflows]) -> None:
+ """
+ Deletes a scheduled workflow run.
+
+ Args:
+ scheduled (Union[str, ScheduledWorkflows]): The scheduled workflow trigger ID or ScheduledWorkflows instance to delete.
+ """
+ id_ = scheduled
+ if isinstance(scheduled, ScheduledWorkflows):
+ id_ = scheduled.metadata.id
+ self._client.rest.schedule_delete(id_)
+
+ def list(
+ self,
+ offset: Optional[int] = None,
+ limit: Optional[int] = None,
+ workflow_id: Optional[str] = None,
+ additional_metadata: Optional[List[str]] = None,
+ order_by_field: Optional[CronWorkflowsOrderByField] = None,
+ order_by_direction: Optional[WorkflowRunOrderByDirection] = None,
+ ) -> ScheduledWorkflowsList:
+ """
+ Retrieves a list of scheduled workflows based on provided filters.
+
+ Args:
+ offset (Optional[int]): The starting point for the list.
+ limit (Optional[int]): The maximum number of items to return.
+ workflow_id (Optional[str]): Filter by specific workflow ID.
+ additional_metadata (Optional[List[str]]): Filter by additional metadata keys (e.g. ["key1:value1", "key2:value2"]).
+ order_by_field (Optional[CronWorkflowsOrderByField]): Field to order the results by.
+ order_by_direction (Optional[WorkflowRunOrderByDirection]): Direction to order the results.
+
+ Returns:
+ ScheduledWorkflowsList: A list of scheduled workflows matching the criteria.
+ """
+ return self._client.rest.schedule_list(
+ offset=offset,
+ limit=limit,
+ workflow_id=workflow_id,
+ additional_metadata=additional_metadata,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ )
+
+ def get(self, scheduled: Union[str, ScheduledWorkflows]) -> ScheduledWorkflows:
+ """
+ Retrieves a specific scheduled workflow by scheduled run trigger ID.
+
+ Args:
+ scheduled (Union[str, ScheduledWorkflows]): The scheduled workflow trigger ID or ScheduledWorkflows instance to retrieve.
+
+ Returns:
+ ScheduledWorkflows: The requested scheduled workflow instance.
+ """
+ id_ = scheduled
+ if isinstance(scheduled, ScheduledWorkflows):
+ id_ = scheduled.metadata.id
+ return self._client.rest.schedule_get(id_)
+
+
+class ScheduledClientAsync:
+ """
+ Asynchronous client for managing scheduled workflows.
+
+ Attributes:
+ _client (Client): The underlying client used to interact with the REST API asynchronously.
+ """
+
+ _client: Client
+
+ def __init__(self, _client: Client) -> None:
+ """
+ Initializes the ScheduledClientAsync with a given Client instance.
+
+ Args:
+ _client (Client): The client instance to be used for asynchronous REST interactions.
+ """
+ self._client = _client
+
+ async def create(
+ self,
+ workflow_name: str,
+ trigger_at: datetime.datetime,
+ input: Dict[str, Any],
+ additional_metadata: Dict[str, str],
+ ) -> ScheduledWorkflows:
+ """
+ Creates a new scheduled workflow run asynchronously.
+
+ Args:
+ workflow_name (str): The name of the scheduled workflow.
+ trigger_at (datetime.datetime): The datetime when the run should be triggered.
+ input (Dict[str, Any]): The input data for the scheduled workflow.
+ additional_metadata (Dict[str, str]): Additional metadata associated with the future run.
+
+ Returns:
+ ScheduledWorkflows: The created scheduled workflow instance.
+ """
+ return await self._client.rest.aio.schedule_create(
+ workflow_name, trigger_at, input, additional_metadata
+ )
+
+ async def delete(self, scheduled: Union[str, ScheduledWorkflows]) -> None:
+ """
+ Deletes a scheduled workflow asynchronously.
+
+ Args:
+ scheduled (Union[str, ScheduledWorkflows]): The scheduled workflow trigger ID or ScheduledWorkflows instance to delete.
+ """
+ id_ = scheduled
+ if isinstance(scheduled, ScheduledWorkflows):
+ id_ = scheduled.metadata.id
+ await self._client.rest.aio.schedule_delete(id_)
+
+ async def list(
+ self,
+ offset: Optional[int] = None,
+ limit: Optional[int] = None,
+ workflow_id: Optional[str] = None,
+ additional_metadata: Optional[List[str]] = None,
+ order_by_field: Optional[CronWorkflowsOrderByField] = None,
+ order_by_direction: Optional[WorkflowRunOrderByDirection] = None,
+ ) -> ScheduledWorkflowsList:
+ """
+ Retrieves a list of scheduled workflows based on provided filters asynchronously.
+
+ Args:
+ offset (Optional[int]): The starting point for the list.
+ limit (Optional[int]): The maximum number of items to return.
+ workflow_id (Optional[str]): Filter by specific workflow ID.
+ additional_metadata (Optional[List[str]]): Filter by additional metadata keys (e.g. ["key1:value1", "key2:value2"]).
+ order_by_field (Optional[CronWorkflowsOrderByField]): Field to order the results by.
+ order_by_direction (Optional[WorkflowRunOrderByDirection]): Direction to order the results.
+
+ Returns:
+ ScheduledWorkflowsList: A list of scheduled workflows matching the criteria.
+ """
+ return await self._client.rest.aio.schedule_list(
+ offset=offset,
+ limit=limit,
+ workflow_id=workflow_id,
+ additional_metadata=additional_metadata,
+ order_by_field=order_by_field,
+ order_by_direction=order_by_direction,
+ )
+
+ async def get(
+ self, scheduled: Union[str, ScheduledWorkflows]
+ ) -> ScheduledWorkflows:
+ """
+ Retrieves a specific scheduled workflow by scheduled run trigger ID asynchronously.
+
+ Args:
+ scheduled (Union[str, ScheduledWorkflows]): The scheduled workflow trigger ID or ScheduledWorkflows instance to retrieve.
+
+ Returns:
+ ScheduledWorkflows: The requested scheduled workflow instance.
+ """
+ id_ = scheduled
+ if isinstance(scheduled, ScheduledWorkflows):
+ id_ = scheduled.metadata.id
+ return await self._client.rest.aio.schedule_get(id_)
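
Similarly, a sketch of the scheduled client (illustrative only; the workflow name is hypothetical and must already be registered):

from datetime import datetime, timedelta, timezone

from hatchet_sdk import Hatchet

hatchet = Hatchet()

scheduled = hatchet.scheduled.create(
    workflow_name="example-workflow",
    trigger_at=datetime.now(timezone.utc) + timedelta(hours=1),
    input={"send_email": True},
    additional_metadata={"requested_by": "docs-example"},
)

# The returned ScheduledWorkflows object (or its ID) can be used to inspect or cancel the run.
print(hatchet.scheduled.get(scheduled).metadata.id)
hatchet.scheduled.delete(scheduled)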
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/hatchet.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/hatchet.py
new file mode 100644
index 00000000..bf0e9089
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/hatchet.py
@@ -0,0 +1,310 @@
+import asyncio
+import logging
+from typing import Any, Callable, Optional, ParamSpec, Type, TypeVar, Union
+
+from pydantic import BaseModel
+from typing_extensions import deprecated
+
+from hatchet_sdk.clients.rest_client import RestApi
+from hatchet_sdk.context.context import Context
+from hatchet_sdk.contracts.workflows_pb2 import (
+ ConcurrencyLimitStrategy,
+ CreateStepRateLimit,
+ DesiredWorkerLabels,
+ StickyStrategy,
+)
+from hatchet_sdk.features.cron import CronClient
+from hatchet_sdk.features.scheduled import ScheduledClient
+from hatchet_sdk.labels import DesiredWorkerLabel
+from hatchet_sdk.loader import ClientConfig, ConfigLoader
+from hatchet_sdk.rate_limit import RateLimit
+from hatchet_sdk.v2.callable import HatchetCallable
+
+from .client import Client, new_client, new_client_raw
+from .clients.admin import AdminClient
+from .clients.dispatcher.dispatcher import DispatcherClient
+from .clients.events import EventClient
+from .clients.run_event_listener import RunEventListenerClient
+from .logger import logger
+from .worker.worker import Worker
+from .workflow import (
+ ConcurrencyExpression,
+ WorkflowInterface,
+ WorkflowMeta,
+ WorkflowStepProtocol,
+)
+
+T = TypeVar("T", bound=BaseModel)
+R = TypeVar("R")
+P = ParamSpec("P")
+
+TWorkflow = TypeVar("TWorkflow", bound=object)
+
+
+def workflow(
+ name: str = "",
+ on_events: list[str] | None = None,
+ on_crons: list[str] | None = None,
+ version: str = "",
+ timeout: str = "60m",
+ schedule_timeout: str = "5m",
+ sticky: Union[StickyStrategy.Value, None] = None, # type: ignore[name-defined]
+ default_priority: int | None = None,
+ concurrency: ConcurrencyExpression | None = None,
+ input_validator: Type[T] | None = None,
+) -> Callable[[Type[TWorkflow]], WorkflowMeta]:
+ on_events = on_events or []
+ on_crons = on_crons or []
+
+ def inner(cls: Type[TWorkflow]) -> WorkflowMeta:
+ nonlocal name
+ name = name or str(cls.__name__)
+
+ setattr(cls, "on_events", on_events)
+ setattr(cls, "on_crons", on_crons)
+ setattr(cls, "name", name)
+ setattr(cls, "version", version)
+ setattr(cls, "timeout", timeout)
+ setattr(cls, "schedule_timeout", schedule_timeout)
+ setattr(cls, "sticky", sticky)
+ setattr(cls, "default_priority", default_priority)
+ setattr(cls, "concurrency_expression", concurrency)
+
+ # Define a new class with the same name and bases as the original, but
+ # with WorkflowMeta as its metaclass
+
+ ## TODO: Figure out how to type this metaclass correctly
+ setattr(cls, "input_validator", input_validator)
+
+ return WorkflowMeta(name, cls.__bases__, dict(cls.__dict__))
+
+ return inner
+
+
+def step(
+ name: str = "",
+ timeout: str = "",
+ parents: list[str] | None = None,
+ retries: int = 0,
+ rate_limits: list[RateLimit] | None = None,
+ desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
+ backoff_factor: float | None = None,
+ backoff_max_seconds: int | None = None,
+) -> Callable[[Callable[P, R]], Callable[P, R]]:
+ parents = parents or []
+
+ def inner(func: Callable[P, R]) -> Callable[P, R]:
+ limits = None
+ if rate_limits:
+ limits = [rate_limit._req for rate_limit in rate_limits or []]
+
+ setattr(func, "_step_name", name.lower() or str(func.__name__).lower())
+ setattr(func, "_step_parents", parents)
+ setattr(func, "_step_timeout", timeout)
+ setattr(func, "_step_retries", retries)
+ setattr(func, "_step_rate_limits", limits)
+ setattr(func, "_step_backoff_factor", backoff_factor)
+ setattr(func, "_step_backoff_max_seconds", backoff_max_seconds)
+
+ def create_label(d: DesiredWorkerLabel) -> DesiredWorkerLabels:
+ value = d["value"] if "value" in d else None
+ return DesiredWorkerLabels(
+ strValue=str(value) if not isinstance(value, int) else None,
+ intValue=value if isinstance(value, int) else None,
+ required=d["required"] if "required" in d else None, # type: ignore[arg-type]
+ weight=d["weight"] if "weight" in d else None,
+ comparator=d["comparator"] if "comparator" in d else None, # type: ignore[arg-type]
+ )
+
+ setattr(
+ func,
+ "_step_desired_worker_labels",
+ {key: create_label(d) for key, d in desired_worker_labels.items()},
+ )
+
+ return func
+
+ return inner
+
+
+def on_failure_step(
+ name: str = "",
+ timeout: str = "",
+ retries: int = 0,
+ rate_limits: list[RateLimit] | None = None,
+ backoff_factor: float | None = None,
+ backoff_max_seconds: int | None = None,
+) -> Callable[..., Any]:
+ def inner(func: Callable[[Context], Any]) -> Callable[[Context], Any]:
+ limits = None
+ if rate_limits:
+ limits = [
+ CreateStepRateLimit(key=rate_limit.static_key, units=rate_limit.units) # type: ignore[arg-type]
+ for rate_limit in rate_limits or []
+ ]
+
+ setattr(
+ func, "_on_failure_step_name", name.lower() or str(func.__name__).lower()
+ )
+ setattr(func, "_on_failure_step_timeout", timeout)
+ setattr(func, "_on_failure_step_retries", retries)
+ setattr(func, "_on_failure_step_rate_limits", limits)
+ setattr(func, "_on_failure_step_backoff_factor", backoff_factor)
+ setattr(func, "_on_failure_step_backoff_max_seconds", backoff_max_seconds)
+
+ return func
+
+ return inner
+
+
+def concurrency(
+ name: str = "",
+ max_runs: int = 1,
+ limit_strategy: ConcurrencyLimitStrategy = ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS,
+) -> Callable[..., Any]:
+ def inner(func: Callable[[Context], Any]) -> Callable[[Context], Any]:
+ setattr(
+ func,
+ "_concurrency_fn_name",
+ name.lower() or str(func.__name__).lower(),
+ )
+ setattr(func, "_concurrency_max_runs", max_runs)
+ setattr(func, "_concurrency_limit_strategy", limit_strategy)
+
+ return func
+
+ return inner
+
+
+class HatchetRest:
+ """
+ Main client for interacting with the Hatchet API.
+
+ This class provides access to various client interfaces and utility methods
+ for working with Hatchet via the REST API.
+
+ Attributes:
+ rest (RestApi): Interface for REST API operations.
+ """
+
+ rest: RestApi
+
+ def __init__(self, config: ClientConfig = ClientConfig()):
+ _config: ClientConfig = ConfigLoader(".").load_client_config(config)
+ self.rest = RestApi(_config.server_url, _config.token, _config.tenant_id)
+
+
+class Hatchet:
+ """
+ Main client for interacting with the Hatchet SDK.
+
+ This class provides access to various client interfaces and utility methods
+ for working with Hatchet workers, workflows, and steps.
+
+ Attributes:
+ cron (CronClient): Interface for cron trigger operations.
+ scheduled (ScheduledClient): Interface for scheduled workflow trigger operations.
+ admin (AdminClient): Interface for administrative operations.
+ dispatcher (DispatcherClient): Interface for dispatching operations.
+ event (EventClient): Interface for event-related operations.
+ rest (RestApi): Interface for REST API operations.
+ """
+
+ _client: Client
+ cron: CronClient
+ scheduled: ScheduledClient
+
+ @classmethod
+ def from_environment(
+ cls, defaults: ClientConfig = ClientConfig(), **kwargs: Any
+ ) -> "Hatchet":
+ return cls(client=new_client(defaults), **kwargs)
+
+ @classmethod
+ def from_config(cls, config: ClientConfig, **kwargs: Any) -> "Hatchet":
+ return cls(client=new_client_raw(config), **kwargs)
+
+ def __init__(
+ self,
+ debug: bool = False,
+ client: Optional[Client] = None,
+ config: ClientConfig = ClientConfig(),
+ ):
+ """
+ Initialize a new Hatchet instance.
+
+ Args:
+ debug (bool, optional): Enable debug logging. Defaults to False.
+ client (Optional[Client], optional): A pre-configured Client instance. Defaults to None.
+ config (ClientConfig, optional): Configuration for creating a new Client. Defaults to ClientConfig().
+ """
+ if client is not None:
+ self._client = client
+ else:
+ self._client = new_client(config, debug)
+
+ if debug:
+ logger.setLevel(logging.DEBUG)
+
+ self.cron = CronClient(self._client)
+ self.scheduled = ScheduledClient(self._client)
+
+ @property
+ @deprecated(
+ "Direct access to client is deprecated and will be removed in a future version. Use specific client properties (Hatchet.admin, Hatchet.dispatcher, Hatchet.event, Hatchet.rest) instead. [0.32.0]",
+ )
+ def client(self) -> Client:
+ return self._client
+
+ @property
+ def admin(self) -> AdminClient:
+ return self._client.admin
+
+ @property
+ def dispatcher(self) -> DispatcherClient:
+ return self._client.dispatcher
+
+ @property
+ def event(self) -> EventClient:
+ return self._client.event
+
+ @property
+ def rest(self) -> RestApi:
+ return self._client.rest
+
+ @property
+ def listener(self) -> RunEventListenerClient:
+ return self._client.listener
+
+ @property
+ def config(self) -> ClientConfig:
+ return self._client.config
+
+ @property
+ def tenant_id(self) -> str:
+ return self._client.config.tenant_id
+
+ concurrency = staticmethod(concurrency)
+
+ workflow = staticmethod(workflow)
+
+ step = staticmethod(step)
+
+ on_failure_step = staticmethod(on_failure_step)
+
+ def worker(
+ self, name: str, max_runs: int | None = None, labels: dict[str, str | int] = {}
+ ) -> Worker:
+ try:
+ loop = asyncio.get_running_loop()
+ except RuntimeError:
+ loop = None
+
+ return Worker(
+ name=name,
+ max_runs=max_runs,
+ labels=labels,
+ config=self._client.config,
+ debug=self._client.debug,
+ owned_loop=loop is None,
+ )
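
A sketch of how the decorators and worker factory above fit together. It assumes Hatchet and Context are re-exported from the package root (as in the SDK's own examples), that the Worker class, which is not shown in this diff, exposes register_workflow() and start(), and that Context provides workflow_input(); the workflow, step, and label names are hypothetical.

from hatchet_sdk import Context, Hatchet

hatchet = Hatchet(debug=True)

@hatchet.workflow(name="example-workflow", on_events=["user:created"])
class ExampleWorkflow:
    @hatchet.step(timeout="30s", retries=2)
    def fetch(self, context: Context) -> dict:
        return {"user": context.workflow_input()}

    @hatchet.step(
        parents=["fetch"],
        desired_worker_labels={"model": {"value": "gpu", "required": True}},
    )
    def score(self, context: Context) -> dict:
        return {"scored": True}

def main() -> None:
    worker = hatchet.worker("example-worker", max_runs=4, labels={"model": "gpu"})
    worker.register_workflow(ExampleWorkflow())  # assumed Worker API, not shown in this diff
    worker.start()

if __name__ == "__main__":
    main()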
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/labels.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/labels.py
new file mode 100644
index 00000000..646c666d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/labels.py
@@ -0,0 +1,10 @@
+from typing import TypedDict
+
+
+class DesiredWorkerLabel(TypedDict, total=False):
+ value: str | int
+ required: bool | None
+ weight: int | None
+ comparator: int | None  # _ClassVar[WorkerLabelComparator] TODO figure out type
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/loader.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/loader.py
new file mode 100644
index 00000000..0252f33a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/loader.py
@@ -0,0 +1,244 @@
+import os
+from logging import Logger, getLogger
+from typing import Dict, Optional
+from warnings import warn
+
+import yaml
+
+from .token import get_addresses_from_jwt, get_tenant_id_from_jwt
+
+
+class ClientTLSConfig:
+ def __init__(
+ self,
+ tls_strategy: str,
+ cert_file: str,
+ key_file: str,
+ ca_file: str,
+ server_name: str,
+ ):
+ self.tls_strategy = tls_strategy
+ self.cert_file = cert_file
+ self.key_file = key_file
+ self.ca_file = ca_file
+ self.server_name = server_name
+
+
+class ClientConfig:
+ logInterceptor: Logger
+
+ def __init__(
+ self,
+ tenant_id: str | None = None,
+ tls_config: ClientTLSConfig | None = None,
+ token: str | None = None,
+ host_port: str = "localhost:7070",
+ server_url: str = "https://app.dev.hatchet-tools.com",
+ namespace: str | None = None,
+ listener_v2_timeout: int | None = None,
+ logger: Logger | None = None,
+ grpc_max_recv_message_length: int = 4 * 1024 * 1024, # 4MB
+ grpc_max_send_message_length: int = 4 * 1024 * 1024, # 4MB
+ worker_healthcheck_port: int | None = None,
+ worker_healthcheck_enabled: bool | None = None,
+ worker_preset_labels: dict[str, str] = {},
+ enable_force_kill_sync_threads: bool = False,
+ ):
+ self.tenant_id = tenant_id
+ self.tls_config = tls_config
+ self.host_port = host_port
+ self.token = token
+ self.server_url = server_url
+ self.namespace = ""
+ self.logInterceptor = logger
+ self.grpc_max_recv_message_length = grpc_max_recv_message_length
+ self.grpc_max_send_message_length = grpc_max_send_message_length
+ self.worker_healthcheck_port = worker_healthcheck_port
+ self.worker_healthcheck_enabled = worker_healthcheck_enabled
+ self.worker_preset_labels = worker_preset_labels
+ self.enable_force_kill_sync_threads = enable_force_kill_sync_threads
+
+ if not self.logInterceptor:
+ self.logInterceptor = getLogger()
+
+ # case on whether the namespace already has a trailing underscore
+ if namespace and not namespace.endswith("_"):
+ self.namespace = f"{namespace}_"
+ elif namespace:
+ self.namespace = namespace
+
+ self.namespace = self.namespace.lower()
+
+ self.listener_v2_timeout = listener_v2_timeout
+
+
+class ConfigLoader:
+ def __init__(self, directory: str):
+ self.directory = directory
+
+ def load_client_config(self, defaults: ClientConfig) -> ClientConfig:
+ config_file_path = os.path.join(self.directory, "client.yaml")
+ config_data: object = {"tls": {}}
+
+ # determine if client.yaml exists
+ if os.path.exists(config_file_path):
+ with open(config_file_path, "r") as file:
+ config_data = yaml.safe_load(file) or {"tls": {}}
+
+ def get_config_value(key, env_var):
+ if key in config_data:
+ return config_data[key]
+
+ if self._get_env_var(env_var) is not None:
+ return self._get_env_var(env_var)
+
+ return getattr(defaults, key, None)
+
+ namespace = get_config_value("namespace", "HATCHET_CLIENT_NAMESPACE")
+
+ tenant_id = get_config_value("tenantId", "HATCHET_CLIENT_TENANT_ID")
+ token = get_config_value("token", "HATCHET_CLIENT_TOKEN")
+ listener_v2_timeout = get_config_value(
+ "listener_v2_timeout", "HATCHET_CLIENT_LISTENER_V2_TIMEOUT"
+ )
+ listener_v2_timeout = int(listener_v2_timeout) if listener_v2_timeout else None
+
+ if not token:
+ raise ValueError(
+ "Token must be set via HATCHET_CLIENT_TOKEN environment variable"
+ )
+
+ host_port = get_config_value("hostPort", "HATCHET_CLIENT_HOST_PORT")
+ server_url: str | None = None
+
+ grpc_max_recv_message_length = get_config_value(
+ "grpc_max_recv_message_length",
+ "HATCHET_CLIENT_GRPC_MAX_RECV_MESSAGE_LENGTH",
+ )
+ grpc_max_send_message_length = get_config_value(
+ "grpc_max_send_message_length",
+ "HATCHET_CLIENT_GRPC_MAX_SEND_MESSAGE_LENGTH",
+ )
+
+ if grpc_max_recv_message_length:
+ grpc_max_recv_message_length = int(grpc_max_recv_message_length)
+
+ if grpc_max_send_message_length:
+ grpc_max_send_message_length = int(grpc_max_send_message_length)
+
+ if not host_port:
+ # extract host and port from token
+ server_url, grpc_broadcast_address = get_addresses_from_jwt(token)
+ host_port = grpc_broadcast_address
+
+ if not tenant_id:
+ tenant_id = get_tenant_id_from_jwt(token)
+
+ tls_config = self._load_tls_config(config_data.get("tls", {}), host_port)
+
+ worker_healthcheck_port = int(
+ get_config_value(
+ "worker_healthcheck_port", "HATCHET_CLIENT_WORKER_HEALTHCHECK_PORT"
+ )
+ or 8001
+ )
+
+ worker_healthcheck_enabled = (
+ str(
+ get_config_value(
+ "worker_healthcheck_port",
+ "HATCHET_CLIENT_WORKER_HEALTHCHECK_ENABLED",
+ )
+ )
+ == "True"
+ )
+
+ # Add preset labels to the worker config
+ worker_preset_labels: dict[str, str] = defaults.worker_preset_labels
+
+ autoscaling_target = get_config_value(
+ "autoscaling_target", "HATCHET_CLIENT_AUTOSCALING_TARGET"
+ )
+
+ if autoscaling_target:
+ worker_preset_labels["hatchet-autoscaling-target"] = autoscaling_target
+
+ legacy_otlp_endpoint = get_config_value(
+ "otel_exporter_otlp_endpoint", "HATCHET_CLIENT_OTEL_EXPORTER_OTLP_ENDPOINT"
+ )
+
+ legacy_otlp_headers = get_config_value(
+ "otel_exporter_otlp_headers", "HATCHET_CLIENT_OTEL_EXPORTER_OTLP_HEADERS"
+ )
+
+ if legacy_otlp_endpoint or legacy_otlp_headers:
+ warn(
+ "The `otel_exporter_otlp_*` fields are no longer supported as of SDK version `0.46.0`. Please see the documentation on OpenTelemetry at https://docs.hatchet.run/home/features/opentelemetry for more information on how to migrate to the new `HatchetInstrumentor`."
+ )
+
+ enable_force_kill_sync_threads = (
+ get_config_value(
+ "enable_force_kill_sync_threads",
+ "HATCHET_CLIENT_ENABLE_FORCE_KILL_SYNC_THREADS",
+ )
+ == "True"
+ )
+ return ClientConfig(
+ tenant_id=tenant_id,
+ tls_config=tls_config,
+ token=token,
+ host_port=host_port,
+ server_url=server_url,
+ namespace=namespace,
+ listener_v2_timeout=listener_v2_timeout,
+ logger=defaults.logInterceptor,
+ grpc_max_recv_message_length=grpc_max_recv_message_length,
+ grpc_max_send_message_length=grpc_max_send_message_length,
+ worker_healthcheck_port=worker_healthcheck_port,
+ worker_healthcheck_enabled=worker_healthcheck_enabled,
+ worker_preset_labels=worker_preset_labels,
+ enable_force_kill_sync_threads=enable_force_kill_sync_threads,
+ )
+
+ def _load_tls_config(self, tls_data: Dict, host_port) -> ClientTLSConfig:
+ tls_strategy = (
+ tls_data["tlsStrategy"]
+ if "tlsStrategy" in tls_data
+ else self._get_env_var("HATCHET_CLIENT_TLS_STRATEGY")
+ )
+
+ if not tls_strategy:
+ tls_strategy = "tls"
+
+ cert_file = (
+ tls_data["tlsCertFile"]
+ if "tlsCertFile" in tls_data
+ else self._get_env_var("HATCHET_CLIENT_TLS_CERT_FILE")
+ )
+ key_file = (
+ tls_data["tlsKeyFile"]
+ if "tlsKeyFile" in tls_data
+ else self._get_env_var("HATCHET_CLIENT_TLS_KEY_FILE")
+ )
+ ca_file = (
+ tls_data["tlsRootCAFile"]
+ if "tlsRootCAFile" in tls_data
+ else self._get_env_var("HATCHET_CLIENT_TLS_ROOT_CA_FILE")
+ )
+
+ server_name = (
+ tls_data["tlsServerName"]
+ if "tlsServerName" in tls_data
+ else self._get_env_var("HATCHET_CLIENT_TLS_SERVER_NAME")
+ )
+
+ # if server_name is not set, use the host from the host_port
+ if not server_name:
+ server_name = host_port.split(":")[0]
+
+ return ClientTLSConfig(tls_strategy, cert_file, key_file, ca_file, server_name)
+
+ @staticmethod
+ def _get_env_var(env_var: str, default: Optional[str] = None) -> str:
+ return os.environ.get(env_var, default)
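
A small sketch of how the loader resolves settings when no client.yaml is present, so everything comes from environment variables. The token and tenant values are placeholders, and setting HATCHET_CLIENT_HOST_PORT and HATCHET_CLIENT_TENANT_ID keeps the loader from trying to decode the placeholder token as a JWT:

import os

from hatchet_sdk.loader import ClientConfig, ConfigLoader

os.environ["HATCHET_CLIENT_TOKEN"] = "<api-token>"        # placeholder
os.environ["HATCHET_CLIENT_TENANT_ID"] = "<tenant-uuid>"  # placeholder
os.environ["HATCHET_CLIENT_HOST_PORT"] = "localhost:7070"
os.environ["HATCHET_CLIENT_NAMESPACE"] = "Staging"

config = ConfigLoader(".").load_client_config(ClientConfig())

print(config.namespace)                # "staging_" -- lowercased, trailing underscore added
print(config.host_port)                # "localhost:7070"
print(config.tls_config.tls_strategy)  # "tls" unless HATCHET_CLIENT_TLS_STRATEGY overrides it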
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/logger.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/logger.py
new file mode 100644
index 00000000..355d0707
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/logger.py
@@ -0,0 +1,13 @@
+import logging
+import sys
+
+# Create a named logger
+logger = logging.getLogger("hatchet")
+logger.setLevel(logging.INFO)
+
+handler = logging.StreamHandler(sys.stdout)
+formatter = logging.Formatter("[%(levelname)s]\t🪓 -- %(asctime)s - %(message)s")
+handler.setFormatter(formatter)
+logger.addHandler(handler)
+
+logger.propagate = False
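
Because all SDK output goes through this named logger, applications can tune it without touching the SDK; for example:

import logging

# Same effect as passing debug=True to Hatchet(), which sets this logger to DEBUG.
logging.getLogger("hatchet").setLevel(logging.DEBUG)

# Or drop the bundled stdout handler and let the application's logging config take over.
hatchet_logger = logging.getLogger("hatchet")
hatchet_logger.handlers.clear()
hatchet_logger.propagate = True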
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/metadata.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/metadata.py
new file mode 100644
index 00000000..38a31b8b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/metadata.py
@@ -0,0 +1,2 @@
+def get_metadata(token: str):
+ return [("authorization", "bearer " + token)]
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/opentelemetry/instrumentor.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/opentelemetry/instrumentor.py
new file mode 100644
index 00000000..91474c52
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/opentelemetry/instrumentor.py
@@ -0,0 +1,396 @@
+from importlib.metadata import version
+from typing import Any, Callable, Collection, Coroutine
+
+try:
+ from opentelemetry.context import Context
+ from opentelemetry.instrumentation.instrumentor import ( # type: ignore[attr-defined]
+ BaseInstrumentor,
+ )
+ from opentelemetry.instrumentation.utils import unwrap
+ from opentelemetry.metrics import MeterProvider, NoOpMeterProvider, get_meter
+ from opentelemetry.trace import (
+ NoOpTracerProvider,
+ StatusCode,
+ TracerProvider,
+ get_tracer,
+ get_tracer_provider,
+ )
+ from opentelemetry.trace.propagation.tracecontext import (
+ TraceContextTextMapPropagator,
+ )
+ from wrapt import wrap_function_wrapper # type: ignore[import-untyped]
+except (RuntimeError, ImportError, ModuleNotFoundError):
+ raise ModuleNotFoundError(
+ "To use the HatchetInstrumentor, you must install Hatchet's `otel` extra using (e.g.) `pip install hatchet-sdk[otel]`"
+ )
+
+import hatchet_sdk
+from hatchet_sdk.clients.admin import (
+ AdminClient,
+ TriggerWorkflowOptions,
+ WorkflowRunDict,
+)
+from hatchet_sdk.clients.dispatcher.action_listener import Action
+from hatchet_sdk.clients.events import (
+ BulkPushEventWithMetadata,
+ EventClient,
+ PushEventOptions,
+)
+from hatchet_sdk.contracts.events_pb2 import Event
+from hatchet_sdk.worker.runner.runner import Runner
+from hatchet_sdk.workflow_run import WorkflowRunRef
+
+hatchet_sdk_version = version("hatchet-sdk")
+
+InstrumentKwargs = TracerProvider | MeterProvider | None
+
+OTEL_TRACEPARENT_KEY = "traceparent"
+
+
+def create_traceparent() -> str | None:
+ """
+ Creates and returns a W3C traceparent header value using OpenTelemetry's context propagation.
+
+ The traceparent header is used to propagate context information across service boundaries
+ in distributed tracing systems. It follows the W3C Trace Context specification.
+
+ :returns: A W3C-formatted traceparent header value if successful, None if the context
+ injection fails or no active span exists.
+ Example: `00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01`
+ :rtype: str | None
+ """
+
+ carrier: dict[str, str] = {}
+ TraceContextTextMapPropagator().inject(carrier)
+
+ return carrier.get("traceparent")
+
+
+def parse_carrier_from_metadata(metadata: dict[str, str] | None) -> Context | None:
+ """
+ Parses OpenTelemetry trace context from a metadata dictionary.
+
+ Extracts the trace context from metadata using the W3C Trace Context format,
+ specifically looking for the `traceparent` header.
+
+ :param metadata: A dictionary containing metadata key-value pairs,
+ potentially including the `traceparent` header. Can be None.
+ :type metadata: dict[str, str] | None
+ :returns: The extracted OpenTelemetry Context object if a valid `traceparent`
+ is found in the metadata, otherwise None.
+ :rtype: Context | None
+
+ :Example:
+
+ >>> metadata = {"traceparent": "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"}
+ >>> context = parse_carrier_from_metadata(metadata)
+ """
+
+ if not metadata:
+ return None
+
+ traceparent = metadata.get(OTEL_TRACEPARENT_KEY)
+
+ if not traceparent:
+ return None
+
+ return TraceContextTextMapPropagator().extract({OTEL_TRACEPARENT_KEY: traceparent})
+
+
+def inject_traceparent_into_metadata(
+ metadata: dict[str, str], traceparent: str | None = None
+) -> dict[str, str]:
+ """
+ Injects OpenTelemetry `traceparent` into a metadata dictionary.
+
+ Takes a metadata dictionary and an optional `traceparent` string,
+ returning a new metadata dictionary with the `traceparent` added under the
+ `OTEL_TRACEPARENT_KEY`. If no `traceparent` is provided, it attempts to create one.
+
+ :param metadata: The metadata dictionary to inject the `traceparent` into.
+ :type metadata: dict[str, str]
+ :param traceparent: The `traceparent` string to inject. If None, attempts to use
+ the current span.
+ :type traceparent: str | None, optional
+ :returns: A new metadata dictionary containing the original metadata plus
+ the injected `traceparent`, if one was available or could be created.
+ :rtype: dict[str, str]
+
+ :Example:
+
+ >>> metadata = {"key": "value"}
+ >>> new_metadata = inject_traceparent_into_metadata(metadata, "00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01")
+ >>> print(new_metadata)
+ {"key": "value", "traceparent": "00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01"}
+ """
+
+ if not traceparent:
+ traceparent = create_traceparent()
+
+ if not traceparent:
+ return metadata
+
+ return {
+ **metadata,
+ OTEL_TRACEPARENT_KEY: traceparent,
+ }
+
+
+class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
+ def __init__(
+ self,
+ tracer_provider: TracerProvider | None = None,
+ meter_provider: MeterProvider | None = None,
+ ):
+ """
+ Hatchet OpenTelemetry instrumentor.
+
+ The instrumentor provides an OpenTelemetry integration for Hatchet by setting up
+ tracing and metrics collection.
+
+ :param tracer_provider: TracerProvider | None: The OpenTelemetry TracerProvider to use.
+ If not provided, the global tracer provider will be used.
+ :param meter_provider: MeterProvider | None: The OpenTelemetry MeterProvider to use.
+ If not provided, a no-op meter provider will be used.
+ """
+
+ self.tracer_provider = tracer_provider or get_tracer_provider()
+ self.meter_provider = meter_provider or NoOpMeterProvider()
+
+ super().__init__()
+
+ def instrumentation_dependencies(self) -> Collection[str]:
+ return tuple()
+
+ def _instrument(self, **kwargs: InstrumentKwargs) -> None:
+ self._tracer = get_tracer(__name__, hatchet_sdk_version, self.tracer_provider)
+ self._meter = get_meter(__name__, hatchet_sdk_version, self.meter_provider)
+
+ wrap_function_wrapper(
+ hatchet_sdk,
+ "worker.runner.runner.Runner.handle_start_step_run",
+ self._wrap_handle_start_step_run,
+ )
+ wrap_function_wrapper(
+ hatchet_sdk,
+ "worker.runner.runner.Runner.handle_start_group_key_run",
+ self._wrap_handle_get_group_key_run,
+ )
+ wrap_function_wrapper(
+ hatchet_sdk,
+ "worker.runner.runner.Runner.handle_cancel_action",
+ self._wrap_handle_cancel_action,
+ )
+
+ wrap_function_wrapper(
+ hatchet_sdk,
+ "clients.events.EventClient.push",
+ self._wrap_push_event,
+ )
+
+ wrap_function_wrapper(
+ hatchet_sdk,
+ "clients.events.EventClient.bulk_push",
+ self._wrap_bulk_push_event,
+ )
+
+ wrap_function_wrapper(
+ hatchet_sdk,
+ "clients.admin.AdminClient.run_workflow",
+ self._wrap_run_workflow,
+ )
+
+ wrap_function_wrapper(
+ hatchet_sdk,
+ "clients.admin.AdminClientAioImpl.run_workflow",
+ self._wrap_async_run_workflow,
+ )
+
+ wrap_function_wrapper(
+ hatchet_sdk,
+ "clients.admin.AdminClient.run_workflows",
+ self._wrap_run_workflows,
+ )
+
+ wrap_function_wrapper(
+ hatchet_sdk,
+ "clients.admin.AdminClientAioImpl.run_workflows",
+ self._wrap_async_run_workflows,
+ )
+
+ ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+ async def _wrap_handle_start_step_run(
+ self,
+ wrapped: Callable[[Action], Coroutine[None, None, Exception | None]],
+ instance: Runner,
+ args: tuple[Action],
+ kwargs: Any,
+ ) -> Exception | None:
+ action = args[0]
+ traceparent = parse_carrier_from_metadata(action.additional_metadata)
+
+ with self._tracer.start_as_current_span(
+ "hatchet.start_step_run",
+ attributes=action.otel_attributes,
+ context=traceparent,
+ ) as span:
+ result = await wrapped(*args, **kwargs)
+
+ if isinstance(result, Exception):
+ span.set_status(StatusCode.ERROR, str(result))
+
+ return result
+
+ ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+ async def _wrap_handle_get_group_key_run(
+ self,
+ wrapped: Callable[[Action], Coroutine[None, None, Exception | None]],
+ instance: Runner,
+ args: tuple[Action],
+ kwargs: Any,
+ ) -> Exception | None:
+ action = args[0]
+
+ with self._tracer.start_as_current_span(
+ "hatchet.get_group_key_run",
+ attributes=action.otel_attributes,
+ ) as span:
+ result = await wrapped(*args, **kwargs)
+
+ if isinstance(result, Exception):
+ span.set_status(StatusCode.ERROR, str(result))
+
+ return result
+
+ ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+ async def _wrap_handle_cancel_action(
+ self,
+ wrapped: Callable[[str], Coroutine[None, None, Exception | None]],
+ instance: Runner,
+ args: tuple[str],
+ kwargs: Any,
+ ) -> Exception | None:
+ step_run_id = args[0]
+
+ with self._tracer.start_as_current_span(
+ "hatchet.cancel_step_run",
+ attributes={
+ "hatchet.step_run_id": step_run_id,
+ },
+ ):
+ return await wrapped(*args, **kwargs)
+
+ ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+ def _wrap_push_event(
+ self,
+ wrapped: Callable[[str, dict[str, Any], PushEventOptions | None], Event],
+ instance: EventClient,
+ args: tuple[
+ str,
+ dict[str, Any],
+ PushEventOptions | None,
+ ],
+ kwargs: dict[str, str | dict[str, Any] | PushEventOptions | None],
+ ) -> Event:
+ with self._tracer.start_as_current_span(
+ "hatchet.push_event",
+ ):
+ return wrapped(*args, **kwargs)
+
+ ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+ def _wrap_bulk_push_event(
+ self,
+ wrapped: Callable[
+ [list[BulkPushEventWithMetadata], PushEventOptions | None], list[Event]
+ ],
+ instance: EventClient,
+ args: tuple[
+ list[BulkPushEventWithMetadata],
+ PushEventOptions | None,
+ ],
+ kwargs: dict[str, list[BulkPushEventWithMetadata] | PushEventOptions | None],
+ ) -> list[Event]:
+ with self._tracer.start_as_current_span(
+ "hatchet.bulk_push_event",
+ ):
+ return wrapped(*args, **kwargs)
+
+ ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+ def _wrap_run_workflow(
+ self,
+ wrapped: Callable[[str, Any, TriggerWorkflowOptions | None], WorkflowRunRef],
+ instance: AdminClient,
+ args: tuple[str, Any, TriggerWorkflowOptions | None],
+ kwargs: dict[str, str | Any | TriggerWorkflowOptions | None],
+ ) -> WorkflowRunRef:
+ with self._tracer.start_as_current_span(
+ "hatchet.run_workflow",
+ ):
+ return wrapped(*args, **kwargs)
+
+ ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+ async def _wrap_async_run_workflow(
+ self,
+ wrapped: Callable[
+ [str, Any, TriggerWorkflowOptions | None],
+ Coroutine[None, None, WorkflowRunRef],
+ ],
+ instance: AdminClient,
+ args: tuple[str, Any, TriggerWorkflowOptions | None],
+ kwargs: dict[str, str | Any | TriggerWorkflowOptions | None],
+ ) -> WorkflowRunRef:
+ with self._tracer.start_as_current_span(
+ "hatchet.run_workflow",
+ ):
+ return await wrapped(*args, **kwargs)
+
+ ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+ def _wrap_run_workflows(
+ self,
+ wrapped: Callable[
+ [list[WorkflowRunDict], TriggerWorkflowOptions | None], list[WorkflowRunRef]
+ ],
+ instance: AdminClient,
+ args: tuple[
+ list[WorkflowRunDict],
+ TriggerWorkflowOptions | None,
+ ],
+ kwargs: dict[str, list[WorkflowRunDict] | TriggerWorkflowOptions | None],
+ ) -> list[WorkflowRunRef]:
+ with self._tracer.start_as_current_span(
+ "hatchet.run_workflows",
+ ):
+ return wrapped(*args, **kwargs)
+
+ ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+ async def _wrap_async_run_workflows(
+ self,
+ wrapped: Callable[
+ [list[WorkflowRunDict], TriggerWorkflowOptions | None],
+ Coroutine[None, None, list[WorkflowRunRef]],
+ ],
+ instance: AdminClient,
+ args: tuple[
+ list[WorkflowRunDict],
+ TriggerWorkflowOptions | None,
+ ],
+ kwargs: dict[str, list[WorkflowRunDict] | TriggerWorkflowOptions | None],
+ ) -> list[WorkflowRunRef]:
+ with self._tracer.start_as_current_span(
+ "hatchet.run_workflows",
+ ):
+ return await wrapped(*args, **kwargs)
+
+ def _uninstrument(self, **kwargs: InstrumentKwargs) -> None:
+ self.tracer_provider = NoOpTracerProvider()
+ self.meter_provider = NoOpMeterProvider()
+
+ unwrap(hatchet_sdk, "worker.runner.runner.Runner.handle_start_step_run")
+ unwrap(hatchet_sdk, "worker.runner.runner.Runner.handle_start_group_key_run")
+ unwrap(hatchet_sdk, "worker.runner.runner.Runner.handle_cancel_action")
+ unwrap(hatchet_sdk, "clients.events.EventClient.push")
+ unwrap(hatchet_sdk, "clients.events.EventClient.bulk_push")
+ unwrap(hatchet_sdk, "clients.admin.AdminClient.run_workflow")
+ unwrap(hatchet_sdk, "clients.admin.AdminClientAioImpl.run_workflow")
+ unwrap(hatchet_sdk, "clients.admin.AdminClient.run_workflows")
+ unwrap(hatchet_sdk, "clients.admin.AdminClientAioImpl.run_workflows")
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/py.typed b/.venv/lib/python3.12/site-packages/hatchet_sdk/py.typed
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/py.typed
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/rate_limit.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/rate_limit.py
new file mode 100644
index 00000000..0d7b9143
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/rate_limit.py
@@ -0,0 +1,128 @@
+import warnings
+from dataclasses import dataclass
+from typing import Union
+
+from celpy import CELEvalError, Environment
+
+from hatchet_sdk.contracts.workflows_pb2 import CreateStepRateLimit
+
+
+def validate_cel_expression(expr: str) -> bool:
+ env = Environment()
+ try:
+ env.compile(expr)
+ return True
+ except CELEvalError:
+ return False
+
+
+class RateLimitDuration:
+ SECOND = "SECOND"
+ MINUTE = "MINUTE"
+ HOUR = "HOUR"
+ DAY = "DAY"
+ WEEK = "WEEK"
+ MONTH = "MONTH"
+ YEAR = "YEAR"
+
+
+@dataclass
+class RateLimit:
+ """
+ Represents a rate limit configuration for a step in a workflow.
+
+ This class allows for both static and dynamic rate limiting based on various parameters.
+ It supports both simple integer values and Common Expression Language (CEL) expressions
+ for dynamic evaluation.
+
+ Attributes:
+ static_key (str, optional): A static key for rate limiting.
+ dynamic_key (str, optional): A CEL expression for dynamic key evaluation.
+ units (int or str, default=1): The number of units or a CEL expression for dynamic unit calculation.
+ limit (int or str, optional): The rate limit value or a CEL expression for dynamic limit calculation.
+ duration (str, default=RateLimitDuration.MINUTE): The window duration of the rate limit.
+ key (str, optional): Deprecated. Use static_key instead.
+
+ Usage:
+ 1. Static rate limit:
+ rate_limit = RateLimit(static_key="external-api", units=100)
+ > NOTE: if you want to use a static key, you must first put the rate limit: hatchet.admin.put_rate_limit("external-api", 200, RateLimitDuration.SECOND)
+
+ 2. Dynamic rate limit with CEL expressions:
+ rate_limit = RateLimit(
+ dynamic_key="input.user_id",
+ units="input.units",
+ limit="input.limit * input.user_tier"
+ )
+
+ Note:
+ - Either static_key or dynamic_key must be set, but not both.
+ - When using dynamic_key, limit must also be set.
+ - CEL expressions are validated upon instantiation.
+
+ Raises:
+ ValueError: If invalid combinations of attributes are provided or if CEL expressions are invalid.
+ DeprecationWarning: If the deprecated 'key' attribute is used.
+ """
+
+ key: Union[str, None] = None
+ static_key: Union[str, None] = None
+ dynamic_key: Union[str, None] = None
+ units: Union[int, str] = 1
+ limit: Union[int, str, None] = None
+ duration: RateLimitDuration = RateLimitDuration.MINUTE
+
+ _req: CreateStepRateLimit = None
+
+ def __post_init__(self):
+ # juggle the key and key_expr fields
+ key = self.static_key
+ key_expression = self.dynamic_key
+
+ if self.key is not None:
+ warnings.warn(
+ "key is deprecated and will be removed in a future release, please use static_key instead",
+ DeprecationWarning,
+ )
+ key = self.key
+
+ if key_expression is not None:
+ if key is not None:
+ raise ValueError("Cannot have both static key and dynamic key set")
+
+ key = key_expression
+ if not validate_cel_expression(key_expression):
+ raise ValueError(f"Invalid CEL expression: {key_expression}")
+
+ # juggle the units and units_expr fields
+ units = None
+ units_expression = None
+ if isinstance(self.units, int):
+ units = self.units
+ else:
+ if not validate_cel_expression(self.units):
+ raise ValueError(f"Invalid CEL expression: {self.units}")
+ units_expression = self.units
+
+ # juggle the limit and limit_expr fields
+ limit_expression = None
+
+ if self.limit:
+ if isinstance(self.limit, int):
+ limit_expression = f"{self.limit}"
+ else:
+ if not validate_cel_expression(self.limit):
+ raise ValueError(f"Invalid CEL expression: {self.limit}")
+ limit_expression = self.limit
+
+ if key_expression is not None and limit_expression is None:
+ raise ValueError("CEL based keys requires limit to be set")
+
+ self._req = CreateStepRateLimit(
+ key=key,
+ key_expr=key_expression,
+ units=units,
+ units_expr=units_expression,
+ limit_values_expr=limit_expression,
+ duration=self.duration,
+ )
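A short sketch exercising the dataclass above; both forms compile down to a `CreateStepRateLimit` message on `_req`, and the static-key form assumes the limit was registered separately (for example with `hatchet.admin.put_rate_limit`, as the docstring notes). The field values are illustrative:

from hatchet_sdk.rate_limit import RateLimit

# Static: consume 1 unit per run of a pre-registered "external-api" limit.
static = RateLimit(static_key="external-api", units=1)

# Dynamic: key, units, and limit are CEL expressions evaluated against the run input.
dynamic = RateLimit(
    dynamic_key="input.user_id",
    units="input.units",
    limit="input.limit * input.user_tier",
)

print(static._req.key, dynamic._req.key_expr)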
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/semver.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/semver.py
new file mode 100644
index 00000000..4bb9aa4f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/semver.py
@@ -0,0 +1,30 @@
+def bump_minor_version(version: str) -> str:
+ """
+ Bumps the minor version of a semantic version string. NOTE: this does not implement full semver;
+ build metadata and pre-release identifiers are not supported.
+
+ :param version: A semantic version string in the format major.minor.patch
+ :return: A string with the minor version bumped and patch version reset to 0
+ :raises ValueError: If the input is not a valid semantic version string
+ """
+ # if it starts with a v, remove it
+ had_v = False
+ if version.startswith("v"):
+ version = version[1:]
+ had_v = True
+
+ parts = version.split(".")
+ if len(parts) != 3:
+ raise ValueError(f"Invalid semantic version: {version}")
+
+ try:
+ major, minor, _ = map(int, parts)
+ except ValueError:
+ raise ValueError(f"Invalid semantic version: {version}")
+
+ new_minor = minor + 1
+ new_version = f"{major}.{new_minor}.0"
+
+ if had_v:
+ new_version = "v" + new_version
+ return new_version
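For reference, the bump behaviour of the helper above: the patch component resets to 0 and a leading `v` is preserved.

from hatchet_sdk.semver import bump_minor_version

assert bump_minor_version("0.45.3") == "0.46.0"
assert bump_minor_version("v1.2.9") == "v1.3.0"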
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/token.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/token.py
new file mode 100644
index 00000000..313a6671
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/token.py
@@ -0,0 +1,27 @@
+import base64
+import json
+
+
+def get_tenant_id_from_jwt(token: str) -> str:
+ claims = extract_claims_from_jwt(token)
+
+ return claims.get("sub")
+
+
+def get_addresses_from_jwt(token: str) -> tuple[str, str]:
+ claims = extract_claims_from_jwt(token)
+
+ return claims.get("server_url"), claims.get("grpc_broadcast_address")
+
+
+def extract_claims_from_jwt(token: str):
+ parts = token.split(".")
+ if len(parts) != 3:
+ raise ValueError("Invalid token format")
+
+ claims_part = parts[1]
+ claims_part += "=" * ((4 - len(claims_part) % 4) % 4) # Padding for base64 decoding
+ claims_data = base64.urlsafe_b64decode(claims_part)
+ claims = json.loads(claims_data)
+
+ return claims
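A self-contained check of the claim helpers above, using a fabricated unsigned token; the header and signature segments are never decoded, so placeholders are fine, and the claim values are made up:

import base64
import json

from hatchet_sdk.token import get_addresses_from_jwt, get_tenant_id_from_jwt

claims = {
    "sub": "tenant-uuid",
    "server_url": "https://app.example.com",
    "grpc_broadcast_address": "grpc.example.com:443",
}
payload = base64.urlsafe_b64encode(json.dumps(claims).encode()).decode().rstrip("=")
token = f"header.{payload}.signature"

assert get_tenant_id_from_jwt(token) == "tenant-uuid"
assert get_addresses_from_jwt(token) == ("https://app.example.com", "grpc.example.com:443")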
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/utils/aio_utils.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/utils/aio_utils.py
new file mode 100644
index 00000000..459205f1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/utils/aio_utils.py
@@ -0,0 +1,137 @@
+import asyncio
+import inspect
+from concurrent.futures import Executor
+from functools import partial, wraps
+from threading import Thread
+from typing import Any
+
+
+## TODO: Stricter typing here
+def sync_to_async(func: Any) -> Any:
+ """
+ A decorator to run a synchronous function or coroutine in an asynchronous context with added
+ asyncio loop safety.
+
+ This decorator allows you to safely call synchronous functions or coroutines from an
+ asynchronous function by running them in an executor.
+
+ Args:
+ func (callable): The synchronous function or coroutine to be run asynchronously.
+
+ Returns:
+ callable: An asynchronous wrapper function that runs the given function in an executor.
+
+ Example:
+ @sync_to_async
+ def sync_function(x, y):
+ return x + y
+
+ @sync_to_async
+ async def async_function(x, y):
+ return x + y
+
+
+ def undecorated_function(x, y):
+ return x + y
+
+ async def main():
+ result1 = await sync_function(1, 2)
+ result2 = await async_function(3, 4)
+ result3 = await sync_to_async(undecorated_function)(5, 6)
+ print(result1, result2, result3)
+
+ asyncio.run(main())
+ """
+
+ ## TODO: Stricter typing here
+ @wraps(func)
+ async def run(
+ *args: Any,
+ loop: asyncio.AbstractEventLoop | None = None,
+ executor: Executor | None = None,
+ **kwargs: Any
+ ) -> Any:
+ """
+ The asynchronous wrapper function that runs the given function in an executor.
+
+ Args:
+ *args: Positional arguments to pass to the function.
+ loop (asyncio.AbstractEventLoop, optional): The event loop to use. If None, the current running loop is used.
+ executor (concurrent.futures.Executor, optional): The executor to use. If None, the default executor is used.
+ **kwargs: Keyword arguments to pass to the function.
+
+ Returns:
+ The result of the function call.
+ """
+ if loop is None:
+ loop = asyncio.get_running_loop()
+
+ if inspect.iscoroutinefunction(func):
+ # Wrap the coroutine to run it in an executor
+ async def wrapper() -> Any:
+ return await func(*args, **kwargs)
+
+ pfunc = partial(asyncio.run, wrapper())
+ return await loop.run_in_executor(executor, pfunc)
+ else:
+ # Run the synchronous function in an executor
+ pfunc = partial(func, *args, **kwargs)
+ return await loop.run_in_executor(executor, pfunc)
+
+ return run
+
+
+class EventLoopThread:
+ """A class that manages an asyncio event loop running in a separate thread."""
+
+ def __init__(self) -> None:
+ """
+ Initializes the EventLoopThread by creating an event loop
+ and setting up a thread to run the loop.
+ """
+ self.loop = asyncio.new_event_loop()
+ self.thread = Thread(target=self.run_loop_in_thread, args=(self.loop,))
+
+ def __enter__(self, *a, **kw) -> asyncio.AbstractEventLoop:
+ """
+ Starts the thread running the event loop when entering the context.
+
+ Returns:
+ asyncio.AbstractEventLoop: The event loop running in the separate thread.
+ """
+ self.thread.start()
+ return self.loop
+
+ def __exit__(self, *a, **kw) -> None:
+ """
+ Stops the event loop and joins the thread when exiting the context.
+ """
+ self.loop.call_soon_threadsafe(self.loop.stop)
+ self.thread.join()
+
+ def run_loop_in_thread(self, loop: asyncio.AbstractEventLoop) -> None:
+ """
+ Sets the event loop for the current thread and runs it forever.
+
+ Args:
+ loop (asyncio.AbstractEventLoop): The event loop to run.
+ """
+ asyncio.set_event_loop(loop)
+ loop.run_forever()
+
+
+def get_active_event_loop() -> asyncio.AbstractEventLoop | None:
+ """
+ Get the active event loop.
+
+ Returns:
+ asyncio.AbstractEventLoop: The active event loop, or None if there is no active
+ event loop in the current thread.
+ """
+ try:
+ return asyncio.get_event_loop()
+ except RuntimeError as e:
+ if str(e).startswith("There is no current event loop in thread"):
+ return None
+ else:
+ raise e
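A small sketch of `EventLoopThread` as defined above: the context manager hands back a loop running on a background thread, so synchronous code can submit coroutines to it with `asyncio.run_coroutine_threadsafe`:

import asyncio

from hatchet_sdk.utils.aio_utils import EventLoopThread

async def fetch() -> str:
    await asyncio.sleep(0.01)
    return "done"

with EventLoopThread() as loop:
    future = asyncio.run_coroutine_threadsafe(fetch(), loop)
    print(future.result())  # prints "done", then the loop is stopped and joined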
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/utils/backoff.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/utils/backoff.py
new file mode 100644
index 00000000..34ddac7f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/utils/backoff.py
@@ -0,0 +1,9 @@
+import asyncio
+import random
+
+
+async def exp_backoff_sleep(attempt: int, max_sleep_time: float = 5) -> None:
+ base_time = 0.1 # starting sleep time in seconds (100 milliseconds)
+ jitter = random.uniform(0, base_time) # add random jitter
+ sleep_time = min(base_time * (2**attempt) + jitter, max_sleep_time)
+ await asyncio.sleep(sleep_time)
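A sketch of the intended call pattern for `exp_backoff_sleep`: pass the zero-based attempt number so delays grow roughly as 0.1s, 0.2s, 0.4s, ... (plus jitter) up to the cap. `op` is assumed to be any zero-argument coroutine function:

import asyncio

from hatchet_sdk.utils.backoff import exp_backoff_sleep

async def call_with_retries(op, attempts: int = 5):
    for attempt in range(attempts):
        try:
            return await op()
        except Exception:
            if attempt == attempts - 1:
                raise
            await exp_backoff_sleep(attempt)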
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/utils/serialization.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/utils/serialization.py
new file mode 100644
index 00000000..7eb1d13a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/utils/serialization.py
@@ -0,0 +1,18 @@
+from typing import Any
+
+
+def flatten(xs: dict[str, Any], parent_key: str, separator: str) -> dict[str, Any]:
+ if not xs:
+ return {}
+
+ items: list[tuple[str, Any]] = []
+
+ for k, v in xs.items():
+ new_key = parent_key + separator + k if parent_key else k
+
+ if isinstance(v, dict):
+ items.extend(flatten(v, new_key, separator).items())
+ else:
+ items.append((new_key, v))
+
+ return dict(items)
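The flattening above joins nested keys with the separator and leaves non-dict values untouched; an empty `parent_key` keeps top-level keys as-is:

from hatchet_sdk.utils.serialization import flatten

nested = {"user": {"id": 1, "meta": {"tier": "pro"}}, "source": "api"}
assert flatten(nested, "", ".") == {"user.id": 1, "user.meta.tier": "pro", "source": "api"}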
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/utils/types.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/utils/types.py
new file mode 100644
index 00000000..30e469f7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/utils/types.py
@@ -0,0 +1,8 @@
+from typing import Type
+
+from pydantic import BaseModel
+
+
+class WorkflowValidator(BaseModel):
+ workflow_input: Type[BaseModel] | None = None
+ step_output: Type[BaseModel] | None = None
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/utils/typing.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/utils/typing.py
new file mode 100644
index 00000000..db111db5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/utils/typing.py
@@ -0,0 +1,12 @@
+from typing import Any, Type, TypeGuard, TypeVar
+
+from pydantic import BaseModel
+
+T = TypeVar("T", bound=BaseModel)
+
+
+def is_basemodel_subclass(model: Any) -> bool:
+ try:
+ return issubclass(model, BaseModel)
+ except TypeError:
+ return False
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/v2/callable.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/v2/callable.py
new file mode 100644
index 00000000..097a7d87
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/v2/callable.py
@@ -0,0 +1,202 @@
+import asyncio
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Generic,
+ List,
+ Optional,
+ TypedDict,
+ TypeVar,
+ Union,
+)
+
+from hatchet_sdk.clients.admin import ChildTriggerWorkflowOptions
+from hatchet_sdk.context.context import Context
+from hatchet_sdk.contracts.workflows_pb2 import ( # type: ignore[attr-defined]
+ CreateStepRateLimit,
+ CreateWorkflowJobOpts,
+ CreateWorkflowStepOpts,
+ CreateWorkflowVersionOpts,
+ DesiredWorkerLabels,
+ StickyStrategy,
+ WorkflowConcurrencyOpts,
+ WorkflowKind,
+)
+from hatchet_sdk.labels import DesiredWorkerLabel
+from hatchet_sdk.logger import logger
+from hatchet_sdk.rate_limit import RateLimit
+from hatchet_sdk.v2.concurrency import ConcurrencyFunction
+from hatchet_sdk.workflow_run import RunRef
+
+T = TypeVar("T")
+
+
+class HatchetCallable(Generic[T]):
+ def __init__(
+ self,
+ func: Callable[[Context], T],
+ durable: bool = False,
+ name: str = "",
+ auto_register: bool = True,
+ on_events: list[str] | None = None,
+ on_crons: list[str] | None = None,
+ version: str = "",
+ timeout: str = "60m",
+ schedule_timeout: str = "5m",
+ sticky: StickyStrategy = None,
+ retries: int = 0,
+ rate_limits: List[RateLimit] | None = None,
+ concurrency: ConcurrencyFunction | None = None,
+ on_failure: Union["HatchetCallable[T]", None] = None,
+ desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
+ default_priority: int | None = None,
+ ):
+ self.func = func
+
+ on_events = on_events or []
+ on_crons = on_crons or []
+
+ limits = None
+ if rate_limits:
+ limits = [rate_limit._req for rate_limit in rate_limits or []]
+
+ self.function_desired_worker_labels = {}
+
+ for key, d in desired_worker_labels.items():
+ value = d["value"] if "value" in d else None
+ self.function_desired_worker_labels[key] = DesiredWorkerLabels(
+ strValue=str(value) if not isinstance(value, int) else None,
+ intValue=value if isinstance(value, int) else None,
+ required=d["required"] if "required" in d else None,
+ weight=d["weight"] if "weight" in d else None,
+ comparator=d["comparator"] if "comparator" in d else None,
+ )
+ self.sticky = sticky
+ self.default_priority = default_priority
+ self.durable = durable
+ self.function_name = name.lower() or str(func.__name__).lower()
+ self.function_version = version
+ self.function_on_events = on_events
+ self.function_on_crons = on_crons
+ self.function_timeout = timeout
+ self.function_schedule_timeout = schedule_timeout
+ self.function_retries = retries
+ self.function_rate_limits = limits
+ self.function_concurrency = concurrency
+ self.function_on_failure = on_failure
+ self.function_namespace = "default"
+ self.function_auto_register = auto_register
+
+ self.is_coroutine = False
+
+ if asyncio.iscoroutinefunction(func):
+ self.is_coroutine = True
+
+ def __call__(self, context: Context) -> T:
+ return self.func(context)
+
+ def with_namespace(self, namespace: str) -> None:
+ if namespace is not None and namespace != "":
+ self.function_namespace = namespace
+ self.function_name = namespace + self.function_name
+
+ def to_workflow_opts(self) -> CreateWorkflowVersionOpts:
+ kind: WorkflowKind = WorkflowKind.FUNCTION
+
+ if self.durable:
+ kind = WorkflowKind.DURABLE
+
+ on_failure_job: CreateWorkflowJobOpts | None = None
+
+ if self.function_on_failure is not None:
+ on_failure_job = CreateWorkflowJobOpts(
+ name=self.function_name + "-on-failure",
+ steps=[
+ self.function_on_failure.to_step(),
+ ],
+ )
+
+ concurrency: WorkflowConcurrencyOpts | None = None
+
+ if self.function_concurrency is not None:
+ self.function_concurrency.set_namespace(self.function_namespace)
+ concurrency = WorkflowConcurrencyOpts(
+ action=self.function_concurrency.get_action_name(),
+ max_runs=self.function_concurrency.max_runs,
+ limit_strategy=self.function_concurrency.limit_strategy,
+ )
+
+ validated_priority = (
+ max(1, min(3, self.default_priority)) if self.default_priority else None
+ )
+ if validated_priority != self.default_priority:
+ logger.warning(
+ "Warning: Default Priority Must be between 1 and 3 -- inclusively. Adjusted to be within the range."
+ )
+
+ return CreateWorkflowVersionOpts(
+ name=self.function_name,
+ kind=kind,
+ version=self.function_version,
+ event_triggers=self.function_on_events,
+ cron_triggers=self.function_on_crons,
+ schedule_timeout=self.function_schedule_timeout,
+ sticky=self.sticky,
+ on_failure_job=on_failure_job,
+ concurrency=concurrency,
+ jobs=[
+ CreateWorkflowJobOpts(
+ name=self.function_name,
+ steps=[
+ self.to_step(),
+ ],
+ )
+ ],
+ default_priority=validated_priority,
+ )
+
+ def to_step(self) -> CreateWorkflowStepOpts:
+ return CreateWorkflowStepOpts(
+ readable_id=self.function_name,
+ action=self.get_action_name(),
+ timeout=self.function_timeout,
+ inputs="{}",
+ parents=[],
+ retries=self.function_retries,
+ rate_limits=self.function_rate_limits,
+ worker_labels=self.function_desired_worker_labels,
+ )
+
+ def get_action_name(self) -> str:
+ return self.function_namespace + ":" + self.function_name
+
+
+class DurableContext(Context):
+ def run(
+ self,
+ function: str | Callable[[Context], Any],
+ input: dict[Any, Any] = {},
+ key: str | None = None,
+ options: ChildTriggerWorkflowOptions | None = None,
+ ) -> "RunRef[T]":
+ worker_id = self.worker.id()
+
+ workflow_name = function
+
+ if not isinstance(function, str):
+ workflow_name = function.function_name
+
+ # if (
+ # options is not None
+ # and "sticky" in options
+ # and options["sticky"] == True
+ # and not self.worker.has_workflow(workflow_name)
+ # ):
+ # raise Exception(
+ # f"cannot run with sticky: workflow {workflow_name} is not registered on the worker"
+ # )
+
+ trigger_options = self._prepare_workflow_options(key, options, worker_id)
+
+ return self.admin_client.run(function, input, trigger_options)
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/v2/concurrency.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/v2/concurrency.py
new file mode 100644
index 00000000..73d9e3b4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/v2/concurrency.py
@@ -0,0 +1,47 @@
+from typing import Any, Callable
+
+from hatchet_sdk.context.context import Context
+from hatchet_sdk.contracts.workflows_pb2 import ( # type: ignore[attr-defined]
+ ConcurrencyLimitStrategy,
+)
+
+
+class ConcurrencyFunction:
+ def __init__(
+ self,
+ func: Callable[[Context], str],
+ name: str = "concurrency",
+ max_runs: int = 1,
+ limit_strategy: ConcurrencyLimitStrategy = ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,
+ ):
+ self.func = func
+ self.name = name
+ self.max_runs = max_runs
+ self.limit_strategy = limit_strategy
+ self.namespace = "default"
+
+ def set_namespace(self, namespace: str) -> None:
+ self.namespace = namespace
+
+ def get_action_name(self) -> str:
+ return self.namespace + ":" + self.name
+
+ def __call__(self, *args: Any, **kwargs: Any) -> str:
+ return self.func(*args, **kwargs)
+
+ def __str__(self) -> str:
+ return f"{self.name}({self.max_runs})"
+
+ def __repr__(self) -> str:
+ return f"{self.name}({self.max_runs})"
+
+
+def concurrency(
+ name: str = "",
+ max_runs: int = 1,
+ limit_strategy: ConcurrencyLimitStrategy = ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,
+) -> Callable[[Callable[[Context], str]], ConcurrencyFunction]:
+ def inner(func: Callable[[Context], str]) -> ConcurrencyFunction:
+ return ConcurrencyFunction(func, name, max_runs, limit_strategy)
+
+ return inner
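A hedged usage sketch for the decorator above: the key function maps a run's input to a group key, and `max_runs`/`limit_strategy` bound how many runs per group execute at once. `context.workflow_input()` is assumed here as the Context accessor for run input; it is not defined in this file:

from hatchet_sdk.context.context import Context
from hatchet_sdk.contracts.workflows_pb2 import ConcurrencyLimitStrategy
from hatchet_sdk.v2.concurrency import concurrency

@concurrency(max_runs=5, limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN)
def concurrency_key(context: Context) -> str:
    # Group runs per user so a single tenant cannot monopolise the worker (accessor assumed).
    return str(context.workflow_input().get("user_id", "default"))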
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/v2/hatchet.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/v2/hatchet.py
new file mode 100644
index 00000000..4dd3faf0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/v2/hatchet.py
@@ -0,0 +1,224 @@
+from typing import Any, Callable, TypeVar, Union
+
+from hatchet_sdk import Worker
+from hatchet_sdk.context.context import Context
+from hatchet_sdk.contracts.workflows_pb2 import ( # type: ignore[attr-defined]
+ ConcurrencyLimitStrategy,
+ StickyStrategy,
+)
+from hatchet_sdk.hatchet import Hatchet as HatchetV1
+from hatchet_sdk.hatchet import workflow
+from hatchet_sdk.labels import DesiredWorkerLabel
+from hatchet_sdk.rate_limit import RateLimit
+from hatchet_sdk.v2.callable import DurableContext, HatchetCallable
+from hatchet_sdk.v2.concurrency import ConcurrencyFunction
+from hatchet_sdk.worker.worker import register_on_worker
+
+T = TypeVar("T")
+
+
+def function(
+ name: str = "",
+ auto_register: bool = True,
+ on_events: list[str] | None = None,
+ on_crons: list[str] | None = None,
+ version: str = "",
+ timeout: str = "60m",
+ schedule_timeout: str = "5m",
+ sticky: StickyStrategy = None,
+ retries: int = 0,
+ rate_limits: list[RateLimit] | None = None,
+ desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
+ concurrency: ConcurrencyFunction | None = None,
+ on_failure: Union["HatchetCallable[T]", None] = None,
+ default_priority: int | None = None,
+) -> Callable[[Callable[[Context], str]], HatchetCallable[T]]:
+ def inner(func: Callable[[Context], T]) -> HatchetCallable[T]:
+ return HatchetCallable(
+ func=func,
+ name=name,
+ auto_register=auto_register,
+ on_events=on_events,
+ on_crons=on_crons,
+ version=version,
+ timeout=timeout,
+ schedule_timeout=schedule_timeout,
+ sticky=sticky,
+ retries=retries,
+ rate_limits=rate_limits,
+ desired_worker_labels=desired_worker_labels,
+ concurrency=concurrency,
+ on_failure=on_failure,
+ default_priority=default_priority,
+ )
+
+ return inner
+
+
+def durable(
+ name: str = "",
+ auto_register: bool = True,
+ on_events: list[str] | None = None,
+ on_crons: list[str] | None = None,
+ version: str = "",
+ timeout: str = "60m",
+ schedule_timeout: str = "5m",
+ sticky: StickyStrategy = None,
+ retries: int = 0,
+ rate_limits: list[RateLimit] | None = None,
+ desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
+ concurrency: ConcurrencyFunction | None = None,
+ on_failure: HatchetCallable[T] | None = None,
+ default_priority: int | None = None,
+) -> Callable[[HatchetCallable[T]], HatchetCallable[T]]:
+ def inner(func: HatchetCallable[T]) -> HatchetCallable[T]:
+ func.durable = True
+
+ f = function(
+ name=name,
+ auto_register=auto_register,
+ on_events=on_events,
+ on_crons=on_crons,
+ version=version,
+ timeout=timeout,
+ schedule_timeout=schedule_timeout,
+ sticky=sticky,
+ retries=retries,
+ rate_limits=rate_limits,
+ desired_worker_labels=desired_worker_labels,
+ concurrency=concurrency,
+ on_failure=on_failure,
+ default_priority=default_priority,
+ )
+
+ resp = f(func)
+
+ resp.durable = True
+
+ return resp
+
+ return inner
+
+
+def concurrency(
+ name: str = "concurrency",
+ max_runs: int = 1,
+ limit_strategy: ConcurrencyLimitStrategy = ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,
+) -> Callable[[Callable[[Context], str]], ConcurrencyFunction]:
+ def inner(func: Callable[[Context], str]) -> ConcurrencyFunction:
+ return ConcurrencyFunction(func, name, max_runs, limit_strategy)
+
+ return inner
+
+
+class Hatchet(HatchetV1):
+ dag = staticmethod(workflow)
+ concurrency = staticmethod(concurrency)
+
+ functions: list[HatchetCallable[T]] = []
+
+ def function(
+ self,
+ name: str = "",
+ auto_register: bool = True,
+ on_events: list[str] | None = None,
+ on_crons: list[str] | None = None,
+ version: str = "",
+ timeout: str = "60m",
+ schedule_timeout: str = "5m",
+ retries: int = 0,
+ rate_limits: list[RateLimit] | None = None,
+ desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
+ concurrency: ConcurrencyFunction | None = None,
+ on_failure: Union["HatchetCallable[T]", None] = None,
+ default_priority: int | None = None,
+ ) -> Callable[[Callable[[Context], Any]], Callable[[Context], Any]]:
+ resp = function(
+ name=name,
+ auto_register=auto_register,
+ on_events=on_events,
+ on_crons=on_crons,
+ version=version,
+ timeout=timeout,
+ schedule_timeout=schedule_timeout,
+ retries=retries,
+ rate_limits=rate_limits,
+ desired_worker_labels=desired_worker_labels,
+ concurrency=concurrency,
+ on_failure=on_failure,
+ default_priority=default_priority,
+ )
+
+ def wrapper(func: Callable[[Context], str]) -> HatchetCallable[T]:
+ wrapped_resp = resp(func)
+
+ if wrapped_resp.function_auto_register:
+ self.functions.append(wrapped_resp)
+
+ wrapped_resp.with_namespace(self._client.config.namespace)
+
+ return wrapped_resp
+
+ return wrapper
+
+ def durable(
+ self,
+ name: str = "",
+ auto_register: bool = True,
+ on_events: list[str] | None = None,
+ on_crons: list[str] | None = None,
+ version: str = "",
+ timeout: str = "60m",
+ schedule_timeout: str = "5m",
+ sticky: StickyStrategy = None,
+ retries: int = 0,
+ rate_limits: list[RateLimit] | None = None,
+ desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
+ concurrency: ConcurrencyFunction | None = None,
+ on_failure: Union["HatchetCallable[T]", None] = None,
+ default_priority: int | None = None,
+ ) -> Callable[[Callable[[DurableContext], Any]], Callable[[DurableContext], Any]]:
+ resp = durable(
+ name=name,
+ auto_register=auto_register,
+ on_events=on_events,
+ on_crons=on_crons,
+ version=version,
+ timeout=timeout,
+ schedule_timeout=schedule_timeout,
+ sticky=sticky,
+ retries=retries,
+ rate_limits=rate_limits,
+ desired_worker_labels=desired_worker_labels,
+ concurrency=concurrency,
+ on_failure=on_failure,
+ default_priority=default_priority,
+ )
+
+ def wrapper(func: HatchetCallable[T]) -> HatchetCallable[T]:
+ wrapped_resp = resp(func)
+
+ if wrapped_resp.function_auto_register:
+ self.functions.append(wrapped_resp)
+
+ wrapped_resp.with_namespace(self._client.config.namespace)
+
+ return wrapped_resp
+
+ return wrapper
+
+ def worker(
+ self, name: str, max_runs: int | None = None, labels: dict[str, str | int] = {}
+ ):
+ worker = Worker(
+ name=name,
+ max_runs=max_runs,
+ labels=labels,
+ config=self._client.config,
+ debug=self._client.debug,
+ )
+
+ for func in self.functions:
+ register_on_worker(func, worker)
+
+ return worker
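A hedged end-to-end sketch of the v2 surface defined above. `Hatchet()` is assumed to pick up its token and endpoint from the standard `HATCHET_CLIENT_*` environment variables via the v1 constructor, and `worker.start()` is assumed from the base `Worker` class, neither of which is part of this file:

from hatchet_sdk.context.context import Context
from hatchet_sdk.v2.hatchet import Hatchet

hatchet = Hatchet()

@hatchet.function(name="send-email", on_events=["email:queued"], retries=3)
def send_email(context: Context) -> dict:
    return {"status": "sent"}

worker = hatchet.worker("email-worker", max_runs=4)
worker.start()  # assumed blocking entry point on the base Worker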
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/__init__.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/__init__.py
new file mode 100644
index 00000000..450f0cac
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/__init__.py
@@ -0,0 +1 @@
+from .worker import Worker, WorkerStartOptions, WorkerStatus
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/action_listener_process.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/action_listener_process.py
new file mode 100644
index 00000000..08508607
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/action_listener_process.py
@@ -0,0 +1,278 @@
+import asyncio
+import logging
+import signal
+import time
+from dataclasses import dataclass, field
+from multiprocessing import Queue
+from typing import Any, List, Mapping, Optional
+
+import grpc
+
+from hatchet_sdk.clients.dispatcher.action_listener import Action
+from hatchet_sdk.clients.dispatcher.dispatcher import (
+ ActionListener,
+ GetActionListenerRequest,
+ new_dispatcher,
+)
+from hatchet_sdk.contracts.dispatcher_pb2 import (
+ GROUP_KEY_EVENT_TYPE_STARTED,
+ STEP_EVENT_TYPE_STARTED,
+ ActionType,
+)
+from hatchet_sdk.loader import ClientConfig
+from hatchet_sdk.logger import logger
+from hatchet_sdk.utils.backoff import exp_backoff_sleep
+
+ACTION_EVENT_RETRY_COUNT = 5
+
+
+@dataclass
+class ActionEvent:
+ action: Action
+ type: Any # TODO type
+ payload: Optional[str] = None
+
+
+STOP_LOOP = "STOP_LOOP" # Sentinel object to stop the loop
+
+# TODO link to a block post
+BLOCKED_THREAD_WARNING = (
+ "THE TIME TO START THE STEP RUN IS TOO LONG, THE MAIN THREAD MAY BE BLOCKED"
+)
+
+
+def noop_handler():
+ pass
+
+
+@dataclass
+class WorkerActionListenerProcess:
+ name: str
+ actions: List[str]
+ max_runs: int
+ config: ClientConfig
+ action_queue: Queue
+ event_queue: Queue
+ handle_kill: bool = True
+ debug: bool = False
+ labels: dict = field(default_factory=dict)
+
+ listener: ActionListener = field(init=False, default=None)
+
+ killing: bool = field(init=False, default=False)
+
+ action_loop_task: asyncio.Task = field(init=False, default=None)
+ event_send_loop_task: asyncio.Task = field(init=False, default=None)
+
+ running_step_runs: Mapping[str, float] = field(init=False, default_factory=dict)
+
+ def __post_init__(self):
+ if self.debug:
+ logger.setLevel(logging.DEBUG)
+
+ loop = asyncio.get_event_loop()
+ loop.add_signal_handler(signal.SIGINT, noop_handler)
+ loop.add_signal_handler(signal.SIGTERM, noop_handler)
+ loop.add_signal_handler(
+ signal.SIGQUIT, lambda: asyncio.create_task(self.exit_gracefully())
+ )
+
+ async def start(self, retry_attempt=0):
+ if retry_attempt > 5:
+ logger.error("could not start action listener")
+ return
+
+ logger.debug(f"starting action listener: {self.name}")
+
+ try:
+ self.dispatcher_client = new_dispatcher(self.config)
+
+ self.listener = await self.dispatcher_client.get_action_listener(
+ GetActionListenerRequest(
+ worker_name=self.name,
+ services=["default"],
+ actions=self.actions,
+ max_runs=self.max_runs,
+ _labels=self.labels,
+ )
+ )
+
+ logger.debug(f"acquired action listener: {self.listener.worker_id}")
+ except grpc.RpcError as rpc_error:
+ logger.error(f"could not start action listener: {rpc_error}")
+ return
+
+ # Start both loops as background tasks
+ self.action_loop_task = asyncio.create_task(self.start_action_loop())
+ self.event_send_loop_task = asyncio.create_task(self.start_event_send_loop())
+ self.blocked_main_loop = asyncio.create_task(self.start_blocked_main_loop())
+
+ # TODO move event methods to separate class
+ async def _get_event(self):
+ loop = asyncio.get_running_loop()
+ return await loop.run_in_executor(None, self.event_queue.get)
+
+ async def start_event_send_loop(self):
+ while True:
+ event: ActionEvent = await self._get_event()
+ if event == STOP_LOOP:
+ logger.debug("stopping event send loop...")
+ break
+
+ logger.debug(f"tx: event: {event.action.action_id}/{event.type}")
+ asyncio.create_task(self.send_event(event))
+
+ async def start_blocked_main_loop(self):
+ threshold = 1
+ while not self.killing:
+ count = 0
+ for step_run_id, start_time in self.running_step_runs.items():
+ diff = self.now() - start_time
+ if diff > threshold:
+ count += 1
+
+ if count > 0:
+ logger.warning(f"{BLOCKED_THREAD_WARNING}: Waiting Steps {count}")
+ await asyncio.sleep(1)
+
+ async def send_event(self, event: ActionEvent, retry_attempt: int = 1):
+ try:
+ match event.action.action_type:
+ # FIXME: all events sent from an execution of a function are of type ActionType.START_STEP_RUN since
+ # the action is re-used. We should change this.
+ case ActionType.START_STEP_RUN:
+ # TODO right now we're sending two start_step_run events
+ # one on the action loop and one on the event loop
+ # ideally we change the first to an ack to set the time
+ if event.type == STEP_EVENT_TYPE_STARTED:
+ if event.action.step_run_id in self.running_step_runs:
+ diff = (
+ self.now()
+ - self.running_step_runs[event.action.step_run_id]
+ )
+ if diff > 0.1:
+ logger.warning(
+ f"{BLOCKED_THREAD_WARNING}: time to start: {diff}s"
+ )
+ else:
+ logger.debug(f"start time: {diff}")
+ del self.running_step_runs[event.action.step_run_id]
+ else:
+ self.running_step_runs[event.action.step_run_id] = (
+ self.now()
+ )
+
+ asyncio.create_task(
+ self.dispatcher_client.send_step_action_event(
+ event.action, event.type, event.payload
+ )
+ )
+ case ActionType.CANCEL_STEP_RUN:
+ logger.debug("unimplemented event send")
+ case ActionType.START_GET_GROUP_KEY:
+ asyncio.create_task(
+ self.dispatcher_client.send_group_key_action_event(
+ event.action, event.type, event.payload
+ )
+ )
+ case _:
+ logger.error("unknown action type for event send")
+ except Exception as e:
+ logger.error(
+ f"could not send action event ({retry_attempt}/{ACTION_EVENT_RETRY_COUNT}): {e}"
+ )
+ if retry_attempt <= ACTION_EVENT_RETRY_COUNT:
+ await exp_backoff_sleep(retry_attempt, 1)
+ await self.send_event(event, retry_attempt + 1)
+
+ def now(self):
+ return time.time()
+
+ async def start_action_loop(self):
+ try:
+ async for action in self.listener:
+ if action is None:
+ break
+
+ # Process the action here
+ match action.action_type:
+ case ActionType.START_STEP_RUN:
+ self.event_queue.put(
+ ActionEvent(
+ action=action,
+ type=STEP_EVENT_TYPE_STARTED, # TODO ack type
+ )
+ )
+ logger.info(
+ f"rx: start step run: {action.step_run_id}/{action.action_id}"
+ )
+
+ # TODO handle this case better...
+ if action.step_run_id in self.running_step_runs:
+ logger.warning(
+ f"step run already running: {action.step_run_id}"
+ )
+
+ case ActionType.CANCEL_STEP_RUN:
+ logger.info(f"rx: cancel step run: {action.step_run_id}")
+ case ActionType.START_GET_GROUP_KEY:
+ self.event_queue.put(
+ ActionEvent(
+ action=action,
+ type=GROUP_KEY_EVENT_TYPE_STARTED, # TODO ack type
+ )
+ )
+ logger.info(
+ f"rx: start group key: {action.get_group_key_run_id}"
+ )
+ case _:
+ logger.error(
+ f"rx: unknown action type ({action.action_type}): {action.action_type}"
+ )
+ try:
+ self.action_queue.put(action)
+ except Exception as e:
+ logger.error(f"error putting action: {e}")
+
+ except Exception as e:
+ logger.error(f"error in action loop: {e}")
+ finally:
+ logger.info("action loop closed")
+ if not self.killing:
+ await self.exit_gracefully(skip_unregister=True)
+
+ async def cleanup(self):
+ self.killing = True
+
+ if self.listener is not None:
+ self.listener.cleanup()
+
+ self.event_queue.put(STOP_LOOP)
+
+ async def exit_gracefully(self, skip_unregister=False):
+ if self.killing:
+ return
+
+ logger.debug("closing action listener...")
+
+ await self.cleanup()
+
+ while not self.event_queue.empty():
+ pass
+
+ logger.info("action listener closed")
+
+ def exit_forcefully(self):
+ asyncio.run(self.cleanup())
+ logger.debug("forcefully closing listener...")
+
+
+def worker_action_listener_process(*args, **kwargs):
+ async def run():
+ process = WorkerActionListenerProcess(*args, **kwargs)
+ await process.start()
+ # Keep the process running
+ while not process.killing:
+ await asyncio.sleep(0.1)
+
+ asyncio.run(run())
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/run_loop_manager.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/run_loop_manager.py
new file mode 100644
index 00000000..27ed788c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/run_loop_manager.py
@@ -0,0 +1,112 @@
+import asyncio
+import logging
+from dataclasses import dataclass, field
+from multiprocessing import Queue
+from typing import Callable, TypeVar
+
+from hatchet_sdk import Context
+from hatchet_sdk.client import Client, new_client_raw
+from hatchet_sdk.clients.dispatcher.action_listener import Action
+from hatchet_sdk.loader import ClientConfig
+from hatchet_sdk.logger import logger
+from hatchet_sdk.utils.types import WorkflowValidator
+from hatchet_sdk.worker.runner.runner import Runner
+from hatchet_sdk.worker.runner.utils.capture_logs import capture_logs
+
+STOP_LOOP = "STOP_LOOP"
+
+T = TypeVar("T")
+
+
+@dataclass
+class WorkerActionRunLoopManager:
+ name: str
+ action_registry: dict[str, Callable[[Context], T]]
+ validator_registry: dict[str, WorkflowValidator]
+ max_runs: int | None
+ config: ClientConfig
+ action_queue: Queue
+ event_queue: Queue
+ loop: asyncio.AbstractEventLoop
+ handle_kill: bool = True
+ debug: bool = False
+ labels: dict[str, str | int] = field(default_factory=dict)
+
+ client: Client = field(init=False, default=None)
+
+ killing: bool = field(init=False, default=False)
+ runner: Runner = field(init=False, default=None)
+
+ def __post_init__(self):
+ if self.debug:
+ logger.setLevel(logging.DEBUG)
+ self.client = new_client_raw(self.config, self.debug)
+ self.start()
+
+ def start(self, retry_count=1):
+ k = self.loop.create_task(self.async_start(retry_count))
+
+ async def async_start(self, retry_count=1):
+ await capture_logs(
+ self.client.logInterceptor,
+ self.client.event,
+ self._async_start,
+ )(retry_count=retry_count)
+
+ async def _async_start(self, retry_count: int = 1) -> None:
+ logger.info("starting runner...")
+ self.loop = asyncio.get_running_loop()
+ # needed for graceful termination
+ k = self.loop.create_task(self._start_action_loop())
+ await k
+
+ def cleanup(self) -> None:
+ self.killing = True
+
+ self.action_queue.put(STOP_LOOP)
+
+ async def wait_for_tasks(self) -> None:
+ if self.runner:
+ await self.runner.wait_for_tasks()
+
+ async def _start_action_loop(self) -> None:
+ self.runner = Runner(
+ self.name,
+ self.event_queue,
+ self.max_runs,
+ self.handle_kill,
+ self.action_registry,
+ self.validator_registry,
+ self.config,
+ self.labels,
+ )
+
+ logger.debug(f"'{self.name}' waiting for {list(self.action_registry.keys())}")
+ while not self.killing:
+ action: Action = await self._get_action()
+ if action == STOP_LOOP:
+ logger.debug("stopping action runner loop...")
+ break
+
+ self.runner.run(action)
+ logger.debug("action runner loop stopped")
+
+ async def _get_action(self):
+ return await self.loop.run_in_executor(None, self.action_queue.get)
+
+ async def exit_gracefully(self) -> None:
+ if self.killing:
+ return
+
+ logger.info("gracefully exiting runner...")
+
+ self.cleanup()
+
+ # Wait for 1 second to allow last calls to flush. These are calls which have been
+ # added to the event loop as callbacks to tasks, so we're not aware of them in the
+ # task list.
+ await asyncio.sleep(1)
+
+ def exit_forcefully(self) -> None:
+ logger.info("forcefully exiting runner...")
+ self.cleanup()
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/runner.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/runner.py
new file mode 100644
index 00000000..01e61bcf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/runner.py
@@ -0,0 +1,460 @@
+import asyncio
+import contextvars
+import ctypes
+import functools
+import json
+import time
+import traceback
+from concurrent.futures import ThreadPoolExecutor
+from enum import Enum
+from multiprocessing import Queue
+from threading import Thread, current_thread
+from typing import Any, Callable, Dict, cast
+
+from pydantic import BaseModel
+
+from hatchet_sdk.client import new_client_raw
+from hatchet_sdk.clients.admin import new_admin
+from hatchet_sdk.clients.dispatcher.action_listener import Action
+from hatchet_sdk.clients.dispatcher.dispatcher import new_dispatcher
+from hatchet_sdk.clients.run_event_listener import new_listener
+from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener
+from hatchet_sdk.context import Context # type: ignore[attr-defined]
+from hatchet_sdk.context.worker_context import WorkerContext
+from hatchet_sdk.contracts.dispatcher_pb2 import (
+ GROUP_KEY_EVENT_TYPE_COMPLETED,
+ GROUP_KEY_EVENT_TYPE_FAILED,
+ GROUP_KEY_EVENT_TYPE_STARTED,
+ STEP_EVENT_TYPE_COMPLETED,
+ STEP_EVENT_TYPE_FAILED,
+ STEP_EVENT_TYPE_STARTED,
+ ActionType,
+)
+from hatchet_sdk.loader import ClientConfig
+from hatchet_sdk.logger import logger
+from hatchet_sdk.utils.types import WorkflowValidator
+from hatchet_sdk.v2.callable import DurableContext
+from hatchet_sdk.worker.action_listener_process import ActionEvent
+from hatchet_sdk.worker.runner.utils.capture_logs import copy_context_vars, sr, wr
+
+
+class WorkerStatus(Enum):
+ INITIALIZED = 1
+ STARTING = 2
+ HEALTHY = 3
+ UNHEALTHY = 4
+
+
+class Runner:
+ def __init__(
+ self,
+ name: str,
+ event_queue: "Queue[Any]",
+ max_runs: int | None = None,
+ handle_kill: bool = True,
+ action_registry: dict[str, Callable[..., Any]] = {},
+ validator_registry: dict[str, WorkflowValidator] = {},
+ config: ClientConfig = ClientConfig(),
+ labels: dict[str, str | int] = {},
+ ):
+ # We store the config so we can dynamically create clients for the dispatcher client.
+ self.config = config
+ self.client = new_client_raw(config)
+ self.name = self.client.config.namespace + name
+ self.max_runs = max_runs
+ self.tasks: dict[str, asyncio.Task[Any]] = {} # Store run ids and futures
+ self.contexts: dict[str, Context] = {} # Store run ids and contexts
+ self.action_registry: dict[str, Callable[..., Any]] = action_registry
+ self.validator_registry = validator_registry
+
+ self.event_queue = event_queue
+
+ # The thread pool is used for synchronous functions which need to run concurrently
+ self.thread_pool = ThreadPoolExecutor(max_workers=max_runs)
+ self.threads: Dict[str, Thread] = {} # Store run ids and threads
+
+ self.killing = False
+ self.handle_kill = handle_kill
+
+ # We need to initialize a new admin and dispatcher client *after* we've started the event loop,
+ # otherwise the grpc.aio methods will use a different event loop and we'll get a bunch of errors.
+ self.dispatcher_client = new_dispatcher(self.config)
+ self.admin_client = new_admin(self.config)
+ self.workflow_run_event_listener = new_listener(self.config)
+ self.client.workflow_listener = PooledWorkflowRunListener(self.config)
+
+ self.worker_context = WorkerContext(
+ labels=labels, client=new_client_raw(config).dispatcher
+ )
+
+ def create_workflow_run_url(self, action: Action) -> str:
+ return f"{self.config.server_url}/workflow-runs/{action.workflow_run_id}?tenant={action.tenant_id}"
+
+ def run(self, action: Action) -> None:
+ if self.worker_context.id() is None:
+ self.worker_context._worker_id = action.worker_id
+
+ match action.action_type:
+ case ActionType.START_STEP_RUN:
+ log = f"run: start step: {action.action_id}/{action.step_run_id}"
+ logger.info(log)
+ asyncio.create_task(self.handle_start_step_run(action))
+ case ActionType.CANCEL_STEP_RUN:
+ log = f"cancel: step run: {action.action_id}/{action.step_run_id}"
+ logger.info(log)
+ asyncio.create_task(self.handle_cancel_action(action.step_run_id))
+ case ActionType.START_GET_GROUP_KEY:
+ log = f"run: get group key: {action.action_id}/{action.get_group_key_run_id}"
+ logger.info(log)
+ asyncio.create_task(self.handle_start_group_key_run(action))
+ case _:
+ log = f"unknown action type: {action.action_type}"
+ logger.error(log)
+
+ def step_run_callback(self, action: Action) -> Callable[[asyncio.Task[Any]], None]:
+ def inner_callback(task: asyncio.Task[Any]) -> None:
+ self.cleanup_run_id(action.step_run_id)
+
+ errored = False
+ cancelled = task.cancelled()
+
+ # Get the output from the future
+ try:
+ if not cancelled:
+ output = task.result()
+ except Exception as e:
+ errored = True
+
+ # This except is coming from the application itself, so we want to send that to the Hatchet instance
+ self.event_queue.put(
+ ActionEvent(
+ action=action,
+ type=STEP_EVENT_TYPE_FAILED,
+ payload=str(errorWithTraceback(f"{e}", e)),
+ )
+ )
+
+ logger.error(
+ f"failed step run: {action.action_id}/{action.step_run_id}"
+ )
+
+ if not errored and not cancelled:
+ self.event_queue.put(
+ ActionEvent(
+ action=action,
+ type=STEP_EVENT_TYPE_COMPLETED,
+ payload=self.serialize_output(output),
+ )
+ )
+
+ logger.info(
+ f"finished step run: {action.action_id}/{action.step_run_id}"
+ )
+
+ return inner_callback
+
+ def group_key_run_callback(
+ self, action: Action
+ ) -> Callable[[asyncio.Task[Any]], None]:
+ def inner_callback(task: asyncio.Task[Any]) -> None:
+ self.cleanup_run_id(action.get_group_key_run_id)
+
+ errored = False
+ cancelled = task.cancelled()
+
+ # Get the output from the future
+ try:
+ if not cancelled:
+ output = task.result()
+ except Exception as e:
+ errored = True
+ self.event_queue.put(
+ ActionEvent(
+ action=action,
+ type=GROUP_KEY_EVENT_TYPE_FAILED,
+ payload=str(errorWithTraceback(f"{e}", e)),
+ )
+ )
+
+                logger.error(
+                    f"failed group key run: {action.action_id}/{action.get_group_key_run_id}"
+                )
+
+ if not errored and not cancelled:
+ self.event_queue.put(
+ ActionEvent(
+ action=action,
+ type=GROUP_KEY_EVENT_TYPE_COMPLETED,
+ payload=self.serialize_output(output),
+ )
+ )
+
+                logger.info(
+                    f"finished group key run: {action.action_id}/{action.get_group_key_run_id}"
+                )
+
+ return inner_callback
+
+ ## TODO: Stricter type hinting here
+ def thread_action_func(
+ self, context: Context, action_func: Callable[..., Any], action: Action
+ ) -> Any:
+ if action.step_run_id is not None and action.step_run_id != "":
+ self.threads[action.step_run_id] = current_thread()
+ elif (
+ action.get_group_key_run_id is not None
+ and action.get_group_key_run_id != ""
+ ):
+ self.threads[action.get_group_key_run_id] = current_thread()
+
+ return action_func(context)
+
+ ## TODO: Stricter type hinting here
+ # We wrap all actions in an async func
+ async def async_wrapped_action_func(
+ self,
+ context: Context,
+ action_func: Callable[..., Any],
+ action: Action,
+ run_id: str,
+ ) -> Any:
+ wr.set(context.workflow_run_id())
+ sr.set(context.step_run_id)
+
+ try:
+ if (
+ hasattr(action_func, "is_coroutine") and action_func.is_coroutine
+ ) or asyncio.iscoroutinefunction(action_func):
+ return await action_func(context)
+ else:
+ pfunc = functools.partial(
+ # we must copy the context vars to the new thread, as only asyncio natively supports
+ # contextvars
+ copy_context_vars,
+ contextvars.copy_context().items(),
+ self.thread_action_func,
+ context,
+ action_func,
+ action,
+ )
+
+ loop = asyncio.get_event_loop()
+ return await loop.run_in_executor(self.thread_pool, pfunc)
+ except Exception as e:
+ logger.error(
+ errorWithTraceback(
+ f"exception raised in action ({action.action_id}, retry={action.retry_count}):\n{e}",
+ e,
+ )
+ )
+ raise e
+ finally:
+ self.cleanup_run_id(run_id)
+
+ def cleanup_run_id(self, run_id: str | None) -> None:
+ if run_id in self.tasks:
+ del self.tasks[run_id]
+
+ if run_id in self.threads:
+ del self.threads[run_id]
+
+ if run_id in self.contexts:
+ del self.contexts[run_id]
+
+ def create_context(
+ self, action: Action, action_func: Callable[..., Any] | None
+ ) -> Context | DurableContext:
+ if hasattr(action_func, "durable") and getattr(action_func, "durable"):
+ return DurableContext(
+ action,
+ self.dispatcher_client,
+ self.admin_client,
+ self.client.event,
+ self.client.rest,
+ self.client.workflow_listener,
+ self.workflow_run_event_listener,
+ self.worker_context,
+ self.client.config.namespace,
+ validator_registry=self.validator_registry,
+ )
+
+ return Context(
+ action,
+ self.dispatcher_client,
+ self.admin_client,
+ self.client.event,
+ self.client.rest,
+ self.client.workflow_listener,
+ self.workflow_run_event_listener,
+ self.worker_context,
+ self.client.config.namespace,
+ validator_registry=self.validator_registry,
+ )
+
+ ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
+ async def handle_start_step_run(self, action: Action) -> None | Exception:
+ action_name = action.action_id
+
+ # Find the corresponding action function from the registry
+ action_func = self.action_registry.get(action_name)
+
+ context = self.create_context(action, action_func)
+
+ self.contexts[action.step_run_id] = context
+
+ if action_func:
+ self.event_queue.put(
+ ActionEvent(
+ action=action,
+ type=STEP_EVENT_TYPE_STARTED,
+ )
+ )
+
+ loop = asyncio.get_event_loop()
+ task = loop.create_task(
+ self.async_wrapped_action_func(
+ context, action_func, action, action.step_run_id
+ )
+ )
+
+ task.add_done_callback(self.step_run_callback(action))
+ self.tasks[action.step_run_id] = task
+
+ try:
+ await task
+ except Exception as e:
+ return e
+
+ return None
+
+ ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
+ async def handle_start_group_key_run(self, action: Action) -> Exception | None:
+ action_name = action.action_id
+ context = Context(
+ action,
+ self.dispatcher_client,
+ self.admin_client,
+ self.client.event,
+ self.client.rest,
+ self.client.workflow_listener,
+ self.workflow_run_event_listener,
+ self.worker_context,
+ self.client.config.namespace,
+ )
+
+ self.contexts[action.get_group_key_run_id] = context
+
+ # Find the corresponding action function from the registry
+ action_func = self.action_registry.get(action_name)
+
+ if action_func:
+ # send an event that the group key run has started
+ self.event_queue.put(
+ ActionEvent(
+ action=action,
+ type=GROUP_KEY_EVENT_TYPE_STARTED,
+ )
+ )
+
+ loop = asyncio.get_event_loop()
+ task = loop.create_task(
+ self.async_wrapped_action_func(
+ context, action_func, action, action.get_group_key_run_id
+ )
+ )
+
+ task.add_done_callback(self.group_key_run_callback(action))
+ self.tasks[action.get_group_key_run_id] = task
+
+ try:
+ await task
+ except Exception as e:
+ return e
+
+ return None
+
+ def force_kill_thread(self, thread: Thread) -> None:
+ """Terminate a python threading.Thread."""
+ try:
+ if not thread.is_alive():
+ return
+
+ ident = cast(int, thread.ident)
+
+ logger.info(f"Forcefully terminating thread {ident}")
+
+ exc = ctypes.py_object(SystemExit)
+ res = ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(ident), exc)
+ if res == 0:
+ raise ValueError("Invalid thread ID")
+ elif res != 1:
+ logger.error("PyThreadState_SetAsyncExc failed")
+
+ # Call with exception set to 0 is needed to cleanup properly.
+ ctypes.pythonapi.PyThreadState_SetAsyncExc(thread.ident, 0)
+ raise SystemError("PyThreadState_SetAsyncExc failed")
+
+ logger.info(f"Successfully terminated thread {ident}")
+
+ # Immediately add a new thread to the thread pool, because we've actually killed a worker
+ # in the ThreadPoolExecutor
+ self.thread_pool.submit(lambda: None)
+ except Exception as e:
+ logger.exception(f"Failed to terminate thread: {e}")
+
+ ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
+ async def handle_cancel_action(self, run_id: str) -> None:
+ try:
+ # call cancel to signal the context to stop
+ if run_id in self.contexts:
+ context = self.contexts.get(run_id)
+
+ if context:
+ context.cancel()
+
+ await asyncio.sleep(1)
+
+ if run_id in self.tasks:
+ future = self.tasks.get(run_id)
+
+ if future:
+ future.cancel()
+
+ # check if thread is still running, if so, print a warning
+ if run_id in self.threads:
+ thread = self.threads.get(run_id)
+ if thread and self.client.config.enable_force_kill_sync_threads:
+ self.force_kill_thread(thread)
+ await asyncio.sleep(1)
+
+ logger.warning(
+ f"Thread {self.threads[run_id].ident} with run id {run_id} is still running after cancellation. This could cause the thread pool to get blocked and prevent new tasks from running."
+ )
+ finally:
+ self.cleanup_run_id(run_id)
+
+ def serialize_output(self, output: Any) -> str:
+
+ if isinstance(output, BaseModel):
+ return output.model_dump_json()
+
+ if output is not None:
+ try:
+ return json.dumps(output)
+ except Exception as e:
+ logger.error(f"Could not serialize output: {e}")
+ return str(output)
+
+ return ""
+
+ async def wait_for_tasks(self) -> None:
+ running = len(self.tasks.keys())
+ while running > 0:
+ logger.info(f"waiting for {running} tasks to finish...")
+ await asyncio.sleep(1)
+ running = len(self.tasks.keys())
+
+
+def errorWithTraceback(message: str, e: Exception) -> str:
+ trace = "".join(traceback.format_exception(type(e), e, e.__traceback__))
+ return f"{message}\n{trace}"
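+
+
+# Illustrative sketch, not part of the hatchet SDK: a standalone reproduction
+# of the pattern async_wrapped_action_func uses for synchronous steps. The
+# step runs on a ThreadPoolExecutor, and the caller's contextvars are copied
+# into the worker thread first, since only asyncio propagates contextvars
+# automatically. All names below (demo_var, copy_then_call, sync_step) are
+# hypothetical demo code, not Hatchet API.
+if __name__ == "__main__":
+    import asyncio
+    import contextvars
+    import functools
+    from concurrent.futures import ThreadPoolExecutor
+
+    demo_var: contextvars.ContextVar[str | None] = contextvars.ContextVar(
+        "demo_var", default=None
+    )
+
+    def copy_then_call(ctx_vars, func, *args):
+        # Same idea as copy_context_vars: restore each var inside the new thread.
+        for var, value in ctx_vars:
+            var.set(value)
+        return func(*args)
+
+    def sync_step() -> str:
+        # Without the copy step above, demo_var.get() would return None here.
+        return f"seen in worker thread: {demo_var.get()}"
+
+    async def _demo() -> None:
+        demo_var.set("step-run-123")
+        pfunc = functools.partial(
+            copy_then_call, contextvars.copy_context().items(), sync_step
+        )
+        result = await asyncio.get_running_loop().run_in_executor(
+            ThreadPoolExecutor(max_workers=1), pfunc
+        )
+        print(result)
+
+    asyncio.run(_demo())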
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/utils/capture_logs.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/utils/capture_logs.py
new file mode 100644
index 00000000..08c57de8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/utils/capture_logs.py
@@ -0,0 +1,81 @@
+import contextvars
+import functools
+import logging
+from concurrent.futures import ThreadPoolExecutor
+from io import StringIO
+from typing import Any, Coroutine
+
+from hatchet_sdk import logger
+from hatchet_sdk.clients.events import EventClient
+
+wr: contextvars.ContextVar[str | None] = contextvars.ContextVar(
+ "workflow_run_id", default=None
+)
+sr: contextvars.ContextVar[str | None] = contextvars.ContextVar(
+ "step_run_id", default=None
+)
+
+
+def copy_context_vars(ctx_vars, func, *args, **kwargs):
+ for var, value in ctx_vars:
+ var.set(value)
+ return func(*args, **kwargs)
+
+
+class InjectingFilter(logging.Filter):
+    # For reasons we haven't pinned down, the contextvars (sr.get() / wr.get())
+    # are only accessible from this filter, so the run ids are attached to the
+    # record here rather than in CustomLogHandler.emit.
+ def filter(self, record):
+ record.workflow_run_id = wr.get()
+ record.step_run_id = sr.get()
+ return True
+
+
+class CustomLogHandler(logging.StreamHandler):
+ def __init__(self, event_client: EventClient, stream=None):
+ super().__init__(stream)
+ self.logger_thread_pool = ThreadPoolExecutor(max_workers=1)
+ self.event_client = event_client
+
+ def _log(self, line: str, step_run_id: str | None):
+ try:
+ if not step_run_id:
+ return
+
+ self.event_client.log(message=line, step_run_id=step_run_id)
+ except Exception as e:
+ logger.error(f"Error logging: {e}")
+
+ def emit(self, record):
+ super().emit(record)
+
+ log_entry = self.format(record)
+ self.logger_thread_pool.submit(self._log, log_entry, record.step_run_id)
+
+
+def capture_logs(
+ logger: logging.Logger,
+ event_client: EventClient,
+ func: Coroutine[Any, Any, Any],
+):
+ @functools.wraps(func)
+ async def wrapper(*args, **kwargs):
+ if not logger:
+ raise Exception("No logger configured on client")
+
+ log_stream = StringIO()
+ custom_handler = CustomLogHandler(event_client, log_stream)
+ custom_handler.setLevel(logging.INFO)
+ custom_handler.addFilter(InjectingFilter())
+ logger.addHandler(custom_handler)
+
+ try:
+ result = await func(*args, **kwargs)
+ finally:
+ custom_handler.flush()
+ logger.removeHandler(custom_handler)
+ log_stream.close()
+
+ return result
+
+ return wrapper
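+
+
+# Illustrative sketch, not part of the hatchet SDK: rough demo of how
+# capture_logs is consumed. It wraps a coroutine so that log records emitted
+# while the coroutine runs are forwarded through an event client's
+# .log(message=..., step_run_id=...) call. _FakeEventClient and my_step are
+# hypothetical stand-ins; a real EventClient needs a configured Hatchet client.
+if __name__ == "__main__":
+    import asyncio
+
+    class _FakeEventClient:
+        def log(self, message: str, step_run_id: str) -> None:
+            print(f"[forwarded] step={step_run_id} msg={message}")
+
+    demo_logger = logging.getLogger("capture_logs_demo")
+    demo_logger.setLevel(logging.INFO)
+
+    async def my_step() -> None:
+        # The runner normally sets the step_run_id contextvar per task; set it
+        # by hand here so InjectingFilter has something to attach.
+        sr.set("step-run-123")
+        demo_logger.info("hello from inside a step")
+
+    wrapped = capture_logs(demo_logger, _FakeEventClient(), my_step)
+    asyncio.run(wrapped())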
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/utils/error_with_traceback.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/utils/error_with_traceback.py
new file mode 100644
index 00000000..9c09602f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/runner/utils/error_with_traceback.py
@@ -0,0 +1,6 @@
+import traceback
+
+
+def errorWithTraceback(message: str, e: Exception):
+ trace = "".join(traceback.format_exception(type(e), e, e.__traceback__))
+ return f"{message}\n{trace}"
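+
+
+# Illustrative usage sketch, not part of the hatchet SDK: errorWithTraceback
+# simply prepends a message to the formatted traceback of an exception; the
+# runner uses the resulting string as the payload of failure events.
+if __name__ == "__main__":
+    try:
+        1 / 0
+    except Exception as exc:
+        print(errorWithTraceback("step failed:", exc))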
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/worker.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/worker.py
new file mode 100644
index 00000000..b6ec1531
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/worker/worker.py
@@ -0,0 +1,392 @@
+import asyncio
+import multiprocessing
+import multiprocessing.context
+import os
+import signal
+import sys
+from concurrent.futures import Future
+from dataclasses import dataclass, field
+from enum import Enum
+from multiprocessing import Queue
+from multiprocessing.process import BaseProcess
+from types import FrameType
+from typing import Any, Callable, TypeVar, get_type_hints
+
+from aiohttp import web
+from aiohttp.web_request import Request
+from aiohttp.web_response import Response
+from prometheus_client import CONTENT_TYPE_LATEST, Gauge, generate_latest
+
+from hatchet_sdk import Context
+from hatchet_sdk.client import Client, new_client_raw
+from hatchet_sdk.contracts.workflows_pb2 import CreateWorkflowVersionOpts
+from hatchet_sdk.loader import ClientConfig
+from hatchet_sdk.logger import logger
+from hatchet_sdk.utils.types import WorkflowValidator
+from hatchet_sdk.utils.typing import is_basemodel_subclass
+from hatchet_sdk.v2.callable import HatchetCallable
+from hatchet_sdk.v2.concurrency import ConcurrencyFunction
+from hatchet_sdk.worker.action_listener_process import worker_action_listener_process
+from hatchet_sdk.worker.runner.run_loop_manager import WorkerActionRunLoopManager
+from hatchet_sdk.workflow import WorkflowInterface
+
+T = TypeVar("T")
+
+
+class WorkerStatus(Enum):
+ INITIALIZED = 1
+ STARTING = 2
+ HEALTHY = 3
+ UNHEALTHY = 4
+
+
+@dataclass
+class WorkerStartOptions:
+ loop: asyncio.AbstractEventLoop | None = field(default=None)
+
+
+TWorkflow = TypeVar("TWorkflow", bound=object)
+
+
+class Worker:
+ def __init__(
+ self,
+ name: str,
+ config: ClientConfig = ClientConfig(),
+ max_runs: int | None = None,
+ labels: dict[str, str | int] = {},
+ debug: bool = False,
+ owned_loop: bool = True,
+ handle_kill: bool = True,
+ ) -> None:
+ self.name = name
+ self.config = config
+ self.max_runs = max_runs
+ self.debug = debug
+ self.labels = labels
+ self.handle_kill = handle_kill
+ self.owned_loop = owned_loop
+
+ self.client: Client
+
+ self.action_registry: dict[str, Callable[[Context], Any]] = {}
+ self.validator_registry: dict[str, WorkflowValidator] = {}
+
+ self.killing: bool = False
+ self._status: WorkerStatus
+
+ self.action_listener_process: BaseProcess
+ self.action_listener_health_check: asyncio.Task[Any]
+ self.action_runner: WorkerActionRunLoopManager
+
+ self.ctx = multiprocessing.get_context("spawn")
+
+ self.action_queue: "Queue[Any]" = self.ctx.Queue()
+ self.event_queue: "Queue[Any]" = self.ctx.Queue()
+
+ self.loop: asyncio.AbstractEventLoop
+
+ self.client = new_client_raw(self.config, self.debug)
+ self.name = self.client.config.namespace + self.name
+
+ self._setup_signal_handlers()
+
+ self.worker_status_gauge = Gauge(
+ "hatchet_worker_status", "Current status of the Hatchet worker"
+ )
+
+ def register_function(self, action: str, func: Callable[[Context], Any]) -> None:
+ self.action_registry[action] = func
+
+ def register_workflow_from_opts(
+ self, name: str, opts: CreateWorkflowVersionOpts
+ ) -> None:
+ try:
+ self.client.admin.put_workflow(opts.name, opts)
+ except Exception as e:
+ logger.error(f"failed to register workflow: {opts.name}")
+ logger.error(e)
+ sys.exit(1)
+
+ def register_workflow(self, workflow: TWorkflow) -> None:
+ ## Hack for typing
+ assert isinstance(workflow, WorkflowInterface)
+
+ namespace = self.client.config.namespace
+
+ try:
+ self.client.admin.put_workflow(
+ workflow.get_name(namespace), workflow.get_create_opts(namespace)
+ )
+ except Exception as e:
+ logger.error(f"failed to register workflow: {workflow.get_name(namespace)}")
+ logger.error(e)
+ sys.exit(1)
+
+ def create_action_function(
+ action_func: Callable[..., T]
+ ) -> Callable[[Context], T]:
+ def action_function(context: Context) -> T:
+ return action_func(workflow, context)
+
+ if asyncio.iscoroutinefunction(action_func):
+ setattr(action_function, "is_coroutine", True)
+ else:
+ setattr(action_function, "is_coroutine", False)
+
+ return action_function
+
+ for action_name, action_func in workflow.get_actions(namespace):
+ self.action_registry[action_name] = create_action_function(action_func)
+ return_type = get_type_hints(action_func).get("return")
+
+ self.validator_registry[action_name] = WorkflowValidator(
+ workflow_input=workflow.input_validator,
+ step_output=return_type if is_basemodel_subclass(return_type) else None,
+ )
+
+ def status(self) -> WorkerStatus:
+ return self._status
+
+ def setup_loop(self, loop: asyncio.AbstractEventLoop | None = None) -> bool:
+ try:
+ loop = loop or asyncio.get_running_loop()
+ self.loop = loop
+ created_loop = False
+ logger.debug("using existing event loop")
+ return created_loop
+ except RuntimeError:
+ self.loop = asyncio.new_event_loop()
+ logger.debug("creating new event loop")
+ asyncio.set_event_loop(self.loop)
+ created_loop = True
+ return created_loop
+
+ async def health_check_handler(self, request: Request) -> Response:
+ status = self.status()
+
+ return web.json_response({"status": status.name})
+
+ async def metrics_handler(self, request: Request) -> Response:
+ self.worker_status_gauge.set(1 if self.status() == WorkerStatus.HEALTHY else 0)
+
+ return web.Response(body=generate_latest(), content_type="text/plain")
+
+ async def start_health_server(self) -> None:
+ port = self.config.worker_healthcheck_port or 8001
+
+ app = web.Application()
+ app.add_routes(
+ [
+ web.get("/health", self.health_check_handler),
+ web.get("/metrics", self.metrics_handler),
+ ]
+ )
+
+ runner = web.AppRunner(app)
+
+ try:
+ await runner.setup()
+ await web.TCPSite(runner, "0.0.0.0", port).start()
+ except Exception as e:
+ logger.error("failed to start healthcheck server")
+ logger.error(str(e))
+ return
+
+ logger.info(f"healthcheck server running on port {port}")
+
+ def start(
+ self, options: WorkerStartOptions = WorkerStartOptions()
+ ) -> Future[asyncio.Task[Any] | None]:
+ self.owned_loop = self.setup_loop(options.loop)
+
+ f = asyncio.run_coroutine_threadsafe(
+ self.async_start(options, _from_start=True), self.loop
+ )
+
+        # start the loop and wait until it's closed
+ if self.owned_loop:
+ self.loop.run_forever()
+
+ if self.handle_kill:
+ sys.exit(0)
+
+ return f
+
+ ## Start methods
+ async def async_start(
+ self,
+ options: WorkerStartOptions = WorkerStartOptions(),
+ _from_start: bool = False,
+ ) -> Any | None:
+ main_pid = os.getpid()
+ logger.info("------------------------------------------")
+ logger.info("STARTING HATCHET...")
+ logger.debug(f"worker runtime starting on PID: {main_pid}")
+
+ self._status = WorkerStatus.STARTING
+
+ if len(self.action_registry.keys()) == 0:
+ logger.error(
+ "no actions registered, register workflows or actions before starting worker"
+ )
+ return None
+
+        # non-blocking setup
+ if not _from_start:
+ self.setup_loop(options.loop)
+
+ if self.config.worker_healthcheck_enabled:
+ await self.start_health_server()
+
+ self.action_listener_process = self._start_listener()
+
+ self.action_runner = self._run_action_runner()
+
+ self.action_listener_health_check = self.loop.create_task(
+ self._check_listener_health()
+ )
+
+ return await self.action_listener_health_check
+
+ def _run_action_runner(self) -> WorkerActionRunLoopManager:
+        # Construct the run loop manager around the shared action/event queues
+ return WorkerActionRunLoopManager(
+ self.name,
+ self.action_registry,
+ self.validator_registry,
+ self.max_runs,
+ self.config,
+ self.action_queue,
+ self.event_queue,
+ self.loop,
+ self.handle_kill,
+ self.client.debug,
+ self.labels,
+ )
+
+ def _start_listener(self) -> multiprocessing.context.SpawnProcess:
+ action_list = [str(key) for key in self.action_registry.keys()]
+
+ try:
+ process = self.ctx.Process(
+ target=worker_action_listener_process,
+ args=(
+ self.name,
+ action_list,
+ self.max_runs,
+ self.config,
+ self.action_queue,
+ self.event_queue,
+ self.handle_kill,
+ self.client.debug,
+ self.labels,
+ ),
+ )
+ process.start()
+ logger.debug(f"action listener starting on PID: {process.pid}")
+
+ return process
+ except Exception as e:
+ logger.error(f"failed to start action listener: {e}")
+ sys.exit(1)
+
+ async def _check_listener_health(self) -> None:
+ logger.debug("starting action listener health check...")
+ try:
+ while not self.killing:
+ if (
+ self.action_listener_process is None
+ or not self.action_listener_process.is_alive()
+ ):
+ logger.debug("child action listener process killed...")
+ self._status = WorkerStatus.UNHEALTHY
+ if not self.killing:
+ self.loop.create_task(self.exit_gracefully())
+ break
+ else:
+ self._status = WorkerStatus.HEALTHY
+ await asyncio.sleep(1)
+ except Exception as e:
+ logger.error(f"error checking listener health: {e}")
+
+ ## Cleanup methods
+ def _setup_signal_handlers(self) -> None:
+ signal.signal(signal.SIGTERM, self._handle_exit_signal)
+ signal.signal(signal.SIGINT, self._handle_exit_signal)
+ signal.signal(signal.SIGQUIT, self._handle_force_quit_signal)
+
+ def _handle_exit_signal(self, signum: int, frame: FrameType | None) -> None:
+ sig_name = "SIGTERM" if signum == signal.SIGTERM else "SIGINT"
+ logger.info(f"received signal {sig_name}...")
+ self.loop.create_task(self.exit_gracefully())
+
+ def _handle_force_quit_signal(self, signum: int, frame: FrameType | None) -> None:
+ logger.info("received SIGQUIT...")
+ self.exit_forcefully()
+
+ async def close(self) -> None:
+ logger.info(f"closing worker '{self.name}'...")
+ self.killing = True
+ # self.action_queue.close()
+ # self.event_queue.close()
+
+ if self.action_runner is not None:
+ self.action_runner.cleanup()
+
+ await self.action_listener_health_check
+
+ async def exit_gracefully(self) -> None:
+ logger.debug(f"gracefully stopping worker: {self.name}")
+
+ if self.killing:
+ return self.exit_forcefully()
+
+ self.killing = True
+
+ await self.action_runner.wait_for_tasks()
+
+ await self.action_runner.exit_gracefully()
+
+ if self.action_listener_process and self.action_listener_process.is_alive():
+ self.action_listener_process.kill()
+
+ await self.close()
+ if self.loop and self.owned_loop:
+ self.loop.stop()
+
+ logger.info("👋")
+
+ def exit_forcefully(self) -> None:
+ self.killing = True
+
+ logger.debug(f"forcefully stopping worker: {self.name}")
+
+ self.close()
+
+ if self.action_listener_process:
+ self.action_listener_process.kill() # Forcefully kill the process
+
+ logger.info("👋")
+ sys.exit(
+ 1
+ ) # Exit immediately TODO - should we exit with 1 here, there may be other workers to cleanup
+
+
+def register_on_worker(callable: HatchetCallable[T], worker: Worker) -> None:
+ worker.register_function(callable.get_action_name(), callable)
+
+ if callable.function_on_failure is not None:
+ worker.register_function(
+ callable.function_on_failure.get_action_name(), callable.function_on_failure
+ )
+
+ if callable.function_concurrency is not None:
+ worker.register_function(
+ callable.function_concurrency.get_action_name(),
+ callable.function_concurrency,
+ )
+
+ opts = callable.to_workflow_opts()
+
+ worker.register_workflow_from_opts(opts.name, opts)
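+
+
+# Illustrative sketch, not part of the hatchet SDK: a minimal outline of
+# standing up a Worker with a single registered action. It assumes a working
+# Hatchet client environment (token, server address, etc.); the
+# "demo:say-hello" action name and say_hello step are hypothetical.
+if __name__ == "__main__":
+
+    def say_hello(context: Context) -> dict[str, str]:
+        return {"message": "hello from the worker"}
+
+    demo_worker = Worker(name="demo-worker", max_runs=4)
+    demo_worker.register_function("demo:say-hello", say_hello)
+
+    # With worker_healthcheck_enabled set in the config, /health and /metrics
+    # are also served (port 8001 by default, see start_health_server above).
+    # start() blocks on its own event loop until SIGINT/SIGTERM arrives.
+    demo_worker.start()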
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/workflow.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/workflow.py
new file mode 100644
index 00000000..9c5cef90
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/workflow.py
@@ -0,0 +1,261 @@
+import functools
+from typing import (
+ Any,
+ Callable,
+ Protocol,
+ Type,
+ TypeVar,
+ Union,
+ cast,
+ get_type_hints,
+ runtime_checkable,
+)
+
+from pydantic import BaseModel
+
+from hatchet_sdk import ConcurrencyLimitStrategy
+from hatchet_sdk.contracts.workflows_pb2 import (
+ CreateWorkflowJobOpts,
+ CreateWorkflowStepOpts,
+ CreateWorkflowVersionOpts,
+ StickyStrategy,
+ WorkflowConcurrencyOpts,
+ WorkflowKind,
+)
+from hatchet_sdk.logger import logger
+from hatchet_sdk.utils.typing import is_basemodel_subclass
+
+
+class WorkflowStepProtocol(Protocol):
+ def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
+
+ __name__: str
+
+ _step_name: str
+ _step_timeout: str | None
+ _step_parents: list[str]
+ _step_retries: int | None
+ _step_rate_limits: list[str] | None
+ _step_desired_worker_labels: dict[str, str]
+ _step_backoff_factor: float | None
+ _step_backoff_max_seconds: int | None
+
+ _concurrency_fn_name: str
+ _concurrency_max_runs: int | None
+ _concurrency_limit_strategy: str | None
+
+ _on_failure_step_name: str
+ _on_failure_step_timeout: str | None
+ _on_failure_step_retries: int
+ _on_failure_step_rate_limits: list[str] | None
+ _on_failure_step_backoff_factor: float | None
+ _on_failure_step_backoff_max_seconds: int | None
+
+
+StepsType = list[tuple[str, WorkflowStepProtocol]]
+
+T = TypeVar("T")
+TW = TypeVar("TW", bound="WorkflowInterface")
+
+
+class ConcurrencyExpression:
+ """
+ Defines concurrency limits for a workflow using a CEL expression.
+
+ Args:
+        expression (str): CEL expression used for concurrency grouping (e.g. "input.user_id").
+ max_runs (int): Maximum number of concurrent workflow runs.
+ limit_strategy (ConcurrencyLimitStrategy): Strategy for handling limit violations.
+
+ Example:
+ ConcurrencyExpression("input.user_id", 5, ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS)
+ """
+
+ def __init__(
+ self, expression: str, max_runs: int, limit_strategy: ConcurrencyLimitStrategy
+ ):
+ self.expression = expression
+ self.max_runs = max_runs
+ self.limit_strategy = limit_strategy
+
+
+@runtime_checkable
+class WorkflowInterface(Protocol):
+ def get_name(self, namespace: str) -> str: ...
+
+ def get_actions(self, namespace: str) -> list[tuple[str, Callable[..., Any]]]: ...
+
+ def get_create_opts(self, namespace: str) -> Any: ...
+
+ on_events: list[str] | None
+ on_crons: list[str] | None
+ name: str
+ version: str
+ timeout: str
+ schedule_timeout: str
+ sticky: Union[StickyStrategy.Value, None] # type: ignore[name-defined]
+ default_priority: int | None
+ concurrency_expression: ConcurrencyExpression | None
+ input_validator: Type[BaseModel] | None
+
+
+class WorkflowMeta(type):
+ def __new__(
+ cls: Type["WorkflowMeta"],
+ name: str,
+ bases: tuple[type, ...],
+ attrs: dict[str, Any],
+ ) -> "WorkflowMeta":
+ def _create_steps_actions_list(name: str) -> StepsType:
+ return [
+ (getattr(func, name), attrs.pop(func_name))
+ for func_name, func in list(attrs.items())
+ if hasattr(func, name)
+ ]
+
+ concurrencyActions = _create_steps_actions_list("_concurrency_fn_name")
+ steps = _create_steps_actions_list("_step_name")
+
+ onFailureSteps = _create_steps_actions_list("_on_failure_step_name")
+
+ # Define __init__ and get_step_order methods
+ original_init = attrs.get("__init__") # Get the original __init__ if it exists
+
+ def __init__(self: TW, *args: Any, **kwargs: Any) -> None:
+ if original_init:
+ original_init(self, *args, **kwargs) # Call original __init__
+
+ def get_service_name(namespace: str) -> str:
+ return f"{namespace}{name.lower()}"
+
+ @functools.cache
+ def get_actions(self: TW, namespace: str) -> StepsType:
+ serviceName = get_service_name(namespace)
+
+ func_actions = [
+ (serviceName + ":" + func_name, func) for func_name, func in steps
+ ]
+ concurrency_actions = [
+ (serviceName + ":" + func_name, func)
+ for func_name, func in concurrencyActions
+ ]
+ onFailure_actions = [
+ (serviceName + ":" + func_name, func)
+ for func_name, func in onFailureSteps
+ ]
+
+ return func_actions + concurrency_actions + onFailure_actions
+
+ # Add these methods and steps to class attributes
+ attrs["__init__"] = __init__
+ attrs["get_actions"] = get_actions
+
+ for step_name, step_func in steps:
+ attrs[step_name] = step_func
+
+ def get_name(self: TW, namespace: str) -> str:
+ return namespace + cast(str, attrs["name"])
+
+ attrs["get_name"] = get_name
+
+ cron_triggers = attrs["on_crons"]
+ version = attrs["version"]
+ schedule_timeout = attrs["schedule_timeout"]
+ sticky = attrs["sticky"]
+ default_priority = attrs["default_priority"]
+
+ @functools.cache
+ def get_create_opts(self: TW, namespace: str) -> CreateWorkflowVersionOpts:
+ serviceName = get_service_name(namespace)
+ name = self.get_name(namespace)
+ event_triggers = [namespace + event for event in attrs["on_events"]]
+ createStepOpts: list[CreateWorkflowStepOpts] = [
+ CreateWorkflowStepOpts(
+ readable_id=step_name,
+ action=serviceName + ":" + step_name,
+ timeout=func._step_timeout or "60s",
+ inputs="{}",
+ parents=[x for x in func._step_parents],
+ retries=func._step_retries,
+ rate_limits=func._step_rate_limits, # type: ignore[arg-type]
+ worker_labels=func._step_desired_worker_labels, # type: ignore[arg-type]
+ backoff_factor=func._step_backoff_factor,
+ backoff_max_seconds=func._step_backoff_max_seconds,
+ )
+ for step_name, func in steps
+ ]
+
+ concurrency: WorkflowConcurrencyOpts | None = None
+
+ if len(concurrencyActions) > 0:
+ action = concurrencyActions[0]
+
+ concurrency = WorkflowConcurrencyOpts(
+ action=serviceName + ":" + action[0],
+ max_runs=action[1]._concurrency_max_runs,
+ limit_strategy=action[1]._concurrency_limit_strategy,
+ )
+
+ if self.concurrency_expression:
+ concurrency = WorkflowConcurrencyOpts(
+ expression=self.concurrency_expression.expression,
+ max_runs=self.concurrency_expression.max_runs,
+ limit_strategy=self.concurrency_expression.limit_strategy,
+ )
+
+ if len(concurrencyActions) > 0 and self.concurrency_expression:
+ raise ValueError(
+ "Error: Both concurrencyActions and concurrency_expression are defined. Please use only one concurrency configuration method."
+ )
+
+ on_failure_job: CreateWorkflowJobOpts | None = None
+
+ if len(onFailureSteps) > 0:
+ func_name, func = onFailureSteps[0]
+ on_failure_job = CreateWorkflowJobOpts(
+ name=name + "-on-failure",
+ steps=[
+ CreateWorkflowStepOpts(
+ readable_id=func_name,
+ action=serviceName + ":" + func_name,
+ timeout=func._on_failure_step_timeout or "60s",
+ inputs="{}",
+ parents=[],
+ retries=func._on_failure_step_retries,
+ rate_limits=func._on_failure_step_rate_limits, # type: ignore[arg-type]
+ backoff_factor=func._on_failure_step_backoff_factor,
+ backoff_max_seconds=func._on_failure_step_backoff_max_seconds,
+ )
+ ],
+ )
+
+ validated_priority = (
+ max(1, min(3, default_priority)) if default_priority else None
+ )
+ if validated_priority != default_priority:
+                logger.warning(
+                    "default_priority must be between 1 and 3 (inclusive); adjusted to fall within that range."
+                )
+
+ return CreateWorkflowVersionOpts(
+ name=name,
+ kind=WorkflowKind.DAG,
+ version=version,
+ event_triggers=event_triggers,
+ cron_triggers=cron_triggers,
+ schedule_timeout=schedule_timeout,
+ sticky=sticky,
+ jobs=[
+ CreateWorkflowJobOpts(
+ name=name,
+ steps=createStepOpts,
+ )
+ ],
+ on_failure_job=on_failure_job,
+ concurrency=concurrency,
+ default_priority=validated_priority,
+ )
+
+ attrs["get_create_opts"] = get_create_opts
+
+ return super(WorkflowMeta, cls).__new__(cls, name, bases, attrs)
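+
+
+# Illustrative sketch, not part of the hatchet SDK: shows what WorkflowMeta
+# looks for at class-creation time. Any attribute carrying a `_step_name`
+# marker is collected as a step, and get_name / get_actions are generated from
+# the class attributes. In real code the marker attributes are attached by the
+# SDK's step decorators; they are set by hand here purely for demonstration.
+# get_create_opts would additionally need the full set of `_step_*`
+# attributes, so it is not exercised in this sketch.
+if __name__ == "__main__":
+
+    def _demo_step(self: Any, context: Any) -> dict[str, str]:
+        return {"ok": "true"}
+
+    _demo_step._step_name = "demo_step"  # normally set by a step decorator
+
+    class ExampleWorkflow(metaclass=WorkflowMeta):
+        name = "example-workflow"
+        on_events = ["example:run"]
+        on_crons: list[str] = []
+        version = "0.1.0"
+        timeout = "60s"
+        schedule_timeout = "5m"
+        sticky = None
+        default_priority = None
+        concurrency_expression = None
+        input_validator = None
+
+        demo_step = _demo_step
+
+    wf = ExampleWorkflow()
+    print(wf.get_name("demo-ns"))  # -> demo-nsexample-workflow
+    print(wf.get_actions("demo-ns"))  # -> [("demo-nsexampleworkflow:demo_step", ...)]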
diff --git a/.venv/lib/python3.12/site-packages/hatchet_sdk/workflow_run.py b/.venv/lib/python3.12/site-packages/hatchet_sdk/workflow_run.py
new file mode 100644
index 00000000..064f6741
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/hatchet_sdk/workflow_run.py
@@ -0,0 +1,59 @@
+import asyncio
+from typing import Any, Coroutine, Generic, Optional, TypedDict, TypeVar
+
+from hatchet_sdk.clients.run_event_listener import (
+ RunEventListener,
+ RunEventListenerClient,
+)
+from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener
+from hatchet_sdk.utils.aio_utils import EventLoopThread, get_active_event_loop
+
+
+class WorkflowRunRef:
+ workflow_run_id: str
+
+ def __init__(
+ self,
+ workflow_run_id: str,
+ workflow_listener: PooledWorkflowRunListener,
+ workflow_run_event_listener: RunEventListenerClient,
+ ):
+ self.workflow_run_id = workflow_run_id
+ self.workflow_listener = workflow_listener
+ self.workflow_run_event_listener = workflow_run_event_listener
+
+ def __str__(self):
+ return self.workflow_run_id
+
+ def stream(self) -> RunEventListener:
+ return self.workflow_run_event_listener.stream(self.workflow_run_id)
+
+ def result(self) -> Coroutine:
+ return self.workflow_listener.result(self.workflow_run_id)
+
+ def sync_result(self) -> dict:
+ coro = self.workflow_listener.result(self.workflow_run_id)
+ loop = get_active_event_loop()
+
+ if loop is None:
+ loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(loop)
+ try:
+ return loop.run_until_complete(coro)
+ finally:
+ asyncio.set_event_loop(None)
+ else:
+ return loop.run_until_complete(coro)
+
+
+T = TypeVar("T")
+
+
+class RunRef(WorkflowRunRef, Generic[T]):
+ async def result(self) -> T:
+ res = await self.workflow_listener.result(self.workflow_run_id)
+
+ if len(res) == 1:
+ return list(res.values())[0]
+
+ return res
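+
+
+# Illustrative sketch, not part of the hatchet SDK: typical consumption of a
+# WorkflowRunRef. Await .result() from async code, or fall back to
+# .sync_result() when no event loop is running. How the ref is obtained (e.g.
+# from a client call that triggers a workflow) is outside this module and
+# assumed here; these hypothetical helpers only show how a ref is used.
+async def _print_run_result(ref: WorkflowRunRef) -> None:
+    print("workflow run", ref.workflow_run_id, "->", await ref.result())
+
+
+def _print_run_result_sync(ref: WorkflowRunRef) -> None:
+    # Reuses or creates an event loop under the hood (see sync_result above).
+    print("workflow run", ref.workflow_run_id, "->", ref.sync_result())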