+- `PropagatedHistory.get_workflows()` and `WorkflowResult.get_activity_by_name(...)` + on the receiving side. + +> **Requires** a Dapr sidecar with workflow history propagation support +> (durabletask-go PR #85 / runtime 1.18+).
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""History propagation example. + +The parent workflow runs a couple of activities, then calls a child workflow +with ``propagation=PropagationScope.OWN_HISTORY`` and an activity with +``propagation=PropagationScope.LINEAGE``. The child workflow and the +downstream activity read the parent's recorded history via +``ctx.get_propagated_history()`` and inspect specific events by name. + +This requires a Dapr sidecar built with history propagation enabled +(durabletask-go PR #85 and later). With an older sidecar, the propagation +field is silently dropped and ``get_propagated_history()`` returns ``None``. +""" + +from __future__ import annotations + +import json + +import dapr.ext.workflow as wf + +wfr = wf.WorkflowRuntime() + + +@wfr.activity(name='validate_merchant') +def validate_merchant(ctx: wf.WorkflowActivityContext, merchant_id: str) -> dict: + print(f'*** validating merchant {merchant_id}', flush=True) + return {'merchant_id': merchant_id, 'valid': True} + + +@wfr.activity(name='log_summary') +def log_summary(ctx: wf.WorkflowActivityContext, _: None) -> str: + """Activity that reads the parent workflow's propagated history.""" + history = ctx.get_propagated_history() + if history is None: + print('*** log_summary: no propagated history (sidecar may not support it)', flush=True) + return 'no-history' + + workflows = history.get_workflows() + if not workflows: + print('*** log_summary: propagated history has no workflows', flush=True) + return 'empty-history' + + parent = workflows[-1] + try: + validate = parent.get_activity_by_name('validate_merchant') + except wf.PropagationNotFoundError: + print('*** log_summary: parent did not run validate_merchant', flush=True) + return 'parent-missing-validate' + + print( + f'*** log_summary saw parent on app {parent.app_id} ' + f'with validate_merchant -> completed={validate.completed} output={validate.output}', + 
flush=True, + ) + return 'logged' + + +@wfr.workflow(name='process_payment') +def process_payment(ctx: wf.DaprWorkflowContext, _: None): + """Child workflow: introspect the parent's history before deciding.""" + history = ctx.get_propagated_history() + if history is None: + print('*** process_payment: no propagated history', flush=True) + return 'no-history' + + workflows = history.get_workflows() + if not workflows: + print('*** process_payment: propagated history has no workflows', flush=True) + return 'empty-history' + + parent = workflows[-1] + try: + validate = parent.get_activity_by_name('validate_merchant') + except wf.PropagationNotFoundError: + print('*** process_payment: parent did not run validate_merchant', flush=True) + return 'parent-missing-validate' + + if not validate.completed: + print('*** process_payment: parent validate_merchant is not complete yet', flush=True) + return 'parent-incomplete' + + merchant = json.loads(validate.output or '{}') + print( + f'*** process_payment received parent context for merchant {merchant.get("merchant_id")!r}', + flush=True, + ) + return 'paid' + + +@wfr.workflow(name='merchant_checkout') +def merchant_checkout(ctx: wf.DaprWorkflowContext, merchant_id: str): + """Parent workflow: runs an activity, then propagates its history.""" + yield ctx.call_activity(validate_merchant, input=merchant_id) + + child_result = yield ctx.call_child_workflow( + process_payment, + input=None, + propagation=wf.PropagationScope.OWN_HISTORY, + ) + print(f'*** child workflow result: {child_result}', flush=True) + + audit = yield ctx.call_activity( + log_summary, + input=None, + propagation=wf.PropagationScope.LINEAGE, + ) + print(f'*** audit activity result: {audit}', flush=True) + return {'child': child_result, 'audit': audit} + + +if __name__ == '__main__': + wfr.start() + + wf_client = wf.DaprWorkflowClient() + instance_id = wf_client.schedule_new_workflow(workflow=merchant_checkout, input='merchant-42') + + state = 
wf_client.wait_for_workflow_completion(instance_id, timeout_in_seconds=30) + print( + f'*** workflow completed: status={state.runtime_status.name} output={state.serialized_output}', + flush=True, + ) + + wfr.shutdown() diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py b/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py index ef8e082e..bce18f4e 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py @@ -17,6 +17,14 @@ from dapr.ext.workflow._durabletask.task import TaskFailedError from dapr.ext.workflow.dapr_workflow_client import DaprWorkflowClient from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext, when_all, when_any +from dapr.ext.workflow.propagation import ( + ActivityResult, + ChildWorkflowResult, + PropagatedHistory, + PropagationNotFoundError, + PropagationScope, + WorkflowResult, +) from dapr.ext.workflow.retry_policy import RetryPolicy from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext from dapr.ext.workflow.workflow_runtime import WorkflowRuntime, alternate_name @@ -34,4 +42,10 @@ 'alternate_name', 'RetryPolicy', 'TaskFailedError', + 'PropagationScope', + 'PropagatedHistory', + 'PropagationNotFoundError', + 'WorkflowResult', + 'ActivityResult', + 'ChildWorkflowResult', ] diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/attestation_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/attestation_pb2.py new file mode 100644 index 00000000..ec67ccda --- /dev/null +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/attestation_pb2.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: attestation.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'attestation.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11\x61ttestation.proto\"\xcf\x01\n!ChildCompletionAttestationPayload\x12\x18\n\x10parentInstanceId\x18\x01 \x01(\t\x12\x1d\n\x15parentTaskScheduledId\x18\x02 \x01(\x05\x12\x10\n\x08ioDigest\x18\x03 \x01(\x0c\x12\x18\n\x10signerCertDigest\x18\x04 \x01(\x0c\x12\'\n\x0eterminalStatus\x18\x05 \x01(\x0e\x32\x0f.TerminalStatus\x12\x1c\n\x14\x63\x61nonicalSpecVersion\x18\x06 \x01(\r\"@\n\x1a\x43hildCompletionAttestation\x12\x0f\n\x07payload\x18\x01 \x01(\x0c\x12\x11\n\tsignature\x18\x02 \x01(\x0c\"\xf0\x01\n$ActivityCompletionAttestationPayload\x12\x18\n\x10parentInstanceId\x18\x01 \x01(\t\x12\x1d\n\x15parentTaskScheduledId\x18\x02 \x01(\x05\x12\x14\n\x0c\x61\x63tivityName\x18\x03 \x01(\t\x12\x10\n\x08ioDigest\x18\x04 \x01(\x0c\x12\x18\n\x10signerCertDigest\x18\x05 \x01(\x0c\x12/\n\x0eterminalStatus\x18\x06 \x01(\x0e\x32\x17.ActivityTerminalStatus\x12\x1c\n\x14\x63\x61nonicalSpecVersion\x18\x07 \x01(\r\"C\n\x1d\x41\x63tivityCompletionAttestation\x12\x0f\n\x07payload\x18\x01 \x01(\x0c\x12\x11\n\tsignature\x18\x02 \x01(\x0c\"A\n\x1a\x45xternalSigningCertificate\x12\x0e\n\x06\x64igest\x18\x01 \x01(\x0c\x12\x13\n\x0b\x63\x65rtificate\x18\x02 
\x01(\x0c*l\n\x0eTerminalStatus\x12\x1f\n\x1bTERMINAL_STATUS_UNSPECIFIED\x10\x00\x12\x1d\n\x19TERMINAL_STATUS_COMPLETED\x10\x01\x12\x1a\n\x16TERMINAL_STATUS_FAILED\x10\x02*\x8f\x01\n\x16\x41\x63tivityTerminalStatus\x12(\n$ACTIVITY_TERMINAL_STATUS_UNSPECIFIED\x10\x00\x12&\n\"ACTIVITY_TERMINAL_STATUS_COMPLETED\x10\x01\x12#\n\x1f\x41\x43TIVITY_TERMINAL_STATUS_FAILED\x10\x02\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'dapr.ext.workflow._durabletask.internal.attestation_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n+io.dapr.durabletask.implementation.protobufZ\013/api/protos\252\002\031Dapr.DurableTask.Protobuf' + _globals['_TERMINALSTATUS']._serialized_start=676 + _globals['_TERMINALSTATUS']._serialized_end=784 + _globals['_ACTIVITYTERMINALSTATUS']._serialized_start=787 + _globals['_ACTIVITYTERMINALSTATUS']._serialized_end=930 + _globals['_CHILDCOMPLETIONATTESTATIONPAYLOAD']._serialized_start=22 + _globals['_CHILDCOMPLETIONATTESTATIONPAYLOAD']._serialized_end=229 + _globals['_CHILDCOMPLETIONATTESTATION']._serialized_start=231 + _globals['_CHILDCOMPLETIONATTESTATION']._serialized_end=295 + _globals['_ACTIVITYCOMPLETIONATTESTATIONPAYLOAD']._serialized_start=298 + _globals['_ACTIVITYCOMPLETIONATTESTATIONPAYLOAD']._serialized_end=538 + _globals['_ACTIVITYCOMPLETIONATTESTATION']._serialized_start=540 + _globals['_ACTIVITYCOMPLETIONATTESTATION']._serialized_end=607 + _globals['_EXTERNALSIGNINGCERTIFICATE']._serialized_start=609 + _globals['_EXTERNALSIGNINGCERTIFICATE']._serialized_end=674 +# @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/attestation_pb2.pyi 
b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/attestation_pb2.pyi new file mode 100644 index 00000000..a8665da2 --- /dev/null +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/attestation_pb2.pyi @@ -0,0 +1,397 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2026 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +import builtins as _builtins +import sys +import typing as _typing + +if sys.version_info >= (3, 10): + from typing import TypeAlias as _TypeAlias +else: + from typing_extensions import TypeAlias as _TypeAlias + +DESCRIPTOR: _descriptor.FileDescriptor + +class _TerminalStatus: + ValueType = _typing.NewType("ValueType", _builtins.int) + V: _TypeAlias = ValueType # noqa: Y015 + +class _TerminalStatusEnumTypeWrapper(_enum_type_wrapper._EnumTypeWrapper[_TerminalStatus.ValueType], _builtins.type): + DESCRIPTOR: _descriptor.EnumDescriptor + TERMINAL_STATUS_UNSPECIFIED: _TerminalStatus.ValueType # 0 + TERMINAL_STATUS_COMPLETED: _TerminalStatus.ValueType # 1 + TERMINAL_STATUS_FAILED: _TerminalStatus.ValueType # 2 + +class TerminalStatus(_TerminalStatus, metaclass=_TerminalStatusEnumTypeWrapper): + """============================================================================ + Canonical byte serialization for 
ioDigest + ============================================================================ + + ioDigest fields commit to an invocation's input and output. To stay stable + across protobuf versions, implementations, and languages, the bytes fed + into the digest are defined directly in terms of user-visible data — the + protobuf marshaler's output is never used. + + The digest is: + sha256( u64be(len(inputBytes)) || inputBytes || + u64be(len(outputBytes)) || outputBytes ) + where inputBytes and outputBytes come from the rules below. Length + prefixes are big-endian uint64 to prevent concatenation ambiguity (same + pattern as HistorySignature.eventsDigest). + + --- String normalization --- + All UTF-8 string bytes used below are first normalized to Unicode + Normalization Form C (NFC). Different-language SDKs (Go, .NET, Python, + Java, JS) may default to different Unicode normalization forms; NFC is + the web-standard canonical form and ensures that semantically equal + strings produce identical bytes regardless of which SDK emitted them. + Verifiers MUST NFC-normalize before hashing to compare against a + signer-produced digest. + + --- Input bytes (child workflows and activities) --- + Inputs are carried as google.protobuf.StringValue in the source event. + Canonical bytes: + wrapper unset: zero-length + wrapper set: nfc_utf8(string_value.value) + The StringValue envelope is not included — only the NFC-normalized + UTF-8 content of the value field. + + --- Output bytes, COMPLETED (child and activity) --- + Same rule as input: NFC-normalized UTF-8 bytes of the result + StringValue's value field, or zero-length if unset. 
+ + --- Output bytes, FAILED (child and activity) --- + Spec-defined canonical serialization of TaskFailureDetails, independent + of protobuf wire format: + + u32be(len(errorType)) || nfc_utf8(errorType) + u32be(len(errorMessage)) || nfc_utf8(errorMessage) + u32be(len(stackTrace)) || nfc_utf8(stackTrace) // StringValue.value, + // zero-length if unset + u8(0 if innerFailure is unset, 1 if set) + if innerFailure is set: + + + Fields appear in this fixed order regardless of proto field numbers. + Unset string fields serialize as zero-length. No protobuf tags, varints, + or envelopes are emitted. The TaskFailureDetails.isNonRetriable field is + intentionally excluded — it is a framework retry-policy hint, not a + description of the failure, and committing to it would couple attestation + verification to retry-semantics evolution with no security benefit. + + The innerFailure recursion depth is capped (implementation-defined, at + least 32 levels). Chains exceeding the cap are treated as malformed: + the signer refuses to produce an attestation and the verifier refuses + to verify one. This bounds the work performed on attacker-controlled + input. + + --- Versioning --- + The canonicalSpecVersion field on each payload identifies which revision + of the rules above produced the attestation's ioDigest. Verifiers that + don't recognize the value reject the attestation rather than risk a + silent digest mismatch. Current value: 1. When TaskFailureDetails or + another canonicalized input grows an attestation-relevant field, the + spec is revised and canonicalSpecVersion is incremented — the rules + above are never silently changed. + + --- Certificate validity --- + Verifiers check that the signer certificate is valid at a specific + point in time. 
The correct choice of time depends on whether the + attestation is being verified at ingestion or from stored history: + + * Ingestion (parent absorbs an inbound attestation from a child/ + activity): use a trusted wallclock (time.Now()). The enclosing + HistoryEvent.timestamp is set by the sender and is not yet covered + by a signature at this point, so it cannot be trusted. Wallclock + gives "is the cert still valid right now" which is the right + freshness guarantee at the trust boundary. + + * Stored / propagated history (re-verification after the enclosing + event has been signed): use the enclosing HistoryEvent.timestamp. + Once the event is covered by a HistorySignature, the timestamp is + tamper-evident and provides a stable historical point-in-time for + cert validity — the cert was valid at signing time, even if it has + since expired. This mirrors how HistorySignature itself is checked + against the last event in its signed range. + + Terminal state of a child workflow at the moment of attestation. + """ + +TERMINAL_STATUS_UNSPECIFIED: TerminalStatus.ValueType # 0 +TERMINAL_STATUS_COMPLETED: TerminalStatus.ValueType # 1 +TERMINAL_STATUS_FAILED: TerminalStatus.ValueType # 2 +Global___TerminalStatus: _TypeAlias = TerminalStatus # noqa: Y015 + +class _ActivityTerminalStatus: + ValueType = _typing.NewType("ValueType", _builtins.int) + V: _TypeAlias = ValueType # noqa: Y015 + +class _ActivityTerminalStatusEnumTypeWrapper(_enum_type_wrapper._EnumTypeWrapper[_ActivityTerminalStatus.ValueType], _builtins.type): + DESCRIPTOR: _descriptor.EnumDescriptor + ACTIVITY_TERMINAL_STATUS_UNSPECIFIED: _ActivityTerminalStatus.ValueType # 0 + ACTIVITY_TERMINAL_STATUS_COMPLETED: _ActivityTerminalStatus.ValueType # 1 + ACTIVITY_TERMINAL_STATUS_FAILED: _ActivityTerminalStatus.ValueType # 2 + +class ActivityTerminalStatus(_ActivityTerminalStatus, metaclass=_ActivityTerminalStatusEnumTypeWrapper): + """Terminal state of an activity task at the moment of attestation. 
+ Activities have no "terminate" operation, so the space is smaller than + TerminalStatus. + """ + +ACTIVITY_TERMINAL_STATUS_UNSPECIFIED: ActivityTerminalStatus.ValueType # 0 +ACTIVITY_TERMINAL_STATUS_COMPLETED: ActivityTerminalStatus.ValueType # 1 +ACTIVITY_TERMINAL_STATUS_FAILED: ActivityTerminalStatus.ValueType # 2 +Global___ActivityTerminalStatus: _TypeAlias = ActivityTerminalStatus # noqa: Y015 + +@_typing.final +class ChildCompletionAttestationPayload(_message.Message): + """Inner signed payload for a child workflow completion attestation. The + deterministically serialized form of this message is what the signer + signs over and what receivers verify against; the bytes are produced + once and never re-marshaled. + """ + + DESCRIPTOR: _descriptor.Descriptor + + PARENTINSTANCEID_FIELD_NUMBER: _builtins.int + PARENTTASKSCHEDULEDID_FIELD_NUMBER: _builtins.int + IODIGEST_FIELD_NUMBER: _builtins.int + SIGNERCERTDIGEST_FIELD_NUMBER: _builtins.int + TERMINALSTATUS_FIELD_NUMBER: _builtins.int + CANONICALSPECVERSION_FIELD_NUMBER: _builtins.int + parentInstanceId: _builtins.str + """Parent workflow instance ID. Binds the attestation to a single parent + run, preventing replay by other instances of the same parent workflow + that share a signing key. + """ + parentTaskScheduledId: _builtins.int + """taskScheduledId from the parent's ChildWorkflowInstanceCreatedEvent. + Unique within the parent instance; distinguishes multiple invocations + of the same child workflow. + """ + ioDigest: _builtins.bytes + """sha256 commitment to this invocation's input and output. The bytes + fed into the digest are produced by the canonical byte serialization + spec at the top of this file — not by any protobuf marshaler — so + the digest is stable across proto versions, implementations, and + languages. Use terminalStatus to select the output serialization rule + (COMPLETED or FAILED). 
+ """ + signerCertDigest: _builtins.bytes + """sha256 of the DER-encoded X.509 certificate chain bytes of the + signer (leaf first, intermediates concatenated; same byte format + as the `certificate` field of SigningCertificate). Computed directly + over the DER bytes rather than any protobuf envelope, so the digest + is stable across protobuf version changes. The certificate itself + is carried as a companion field on the enclosing event on first + delivery and stored once in the receiver's external certificate + table (ext-sigcert-NNNNNN), looked up by this digest. + """ + terminalStatus: Global___TerminalStatus.ValueType + """Terminal state of the child workflow at the moment of attestation. + Signed so that a verifier reading the attestation from propagated + history can tell whether the child succeeded without relying on the + enclosing event type (Completed vs Failed), which may not be + visible or trustworthy when the attestation is inspected in + isolation. + """ + canonicalSpecVersion: _builtins.int + """Version of the canonical byte serialization spec used to compute + ioDigest. See the "Versioning" section of the spec block at the top + of this file. Verifiers that don't recognize the value reject the + attestation rather than risk a silent digest mismatch. Current + value: 1. + """ + def __init__( + self, + *, + parentInstanceId: _builtins.str = ..., + parentTaskScheduledId: _builtins.int = ..., + ioDigest: _builtins.bytes = ..., + signerCertDigest: _builtins.bytes = ..., + terminalStatus: Global___TerminalStatus.ValueType = ..., + canonicalSpecVersion: _builtins.int = ..., + ) -> None: ... 
+ _ClearFieldArgType: _TypeAlias = _typing.Literal["canonicalSpecVersion", b"canonicalSpecVersion", "ioDigest", b"ioDigest", "parentInstanceId", b"parentInstanceId", "parentTaskScheduledId", b"parentTaskScheduledId", "signerCertDigest", b"signerCertDigest", "terminalStatus", b"terminalStatus"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + +Global___ChildCompletionAttestationPayload: _TypeAlias = ChildCompletionAttestationPayload # noqa: Y015 + +@_typing.final +class ChildCompletionAttestation(_message.Message): + """Signed wrapper around ChildCompletionAttestationPayload.""" + + DESCRIPTOR: _descriptor.Descriptor + + PAYLOAD_FIELD_NUMBER: _builtins.int + SIGNATURE_FIELD_NUMBER: _builtins.int + payload: _builtins.bytes + """Deterministically serialized form of ChildCompletionAttestationPayload + produced once by the signer. Opaque bytes thereafter; receivers, + storage layers, and verifiers never re-marshal. + """ + signature: _builtins.bytes + """Cryptographic signature over sha256(payload) using the private key + corresponding to the certificate whose digest is in the payload's + signerCertDigest field. Signature format follows the same rules as + HistorySignature.signature. + """ + def __init__( + self, + *, + payload: _builtins.bytes = ..., + signature: _builtins.bytes = ..., + ) -> None: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["payload", b"payload", "signature", b"signature"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + +Global___ChildCompletionAttestation: _TypeAlias = ChildCompletionAttestation # noqa: Y015 + +@_typing.final +class ActivityCompletionAttestationPayload(_message.Message): + """Inner signed payload for an activity completion attestation. Activities + have no signed history chain of their own (unlike child workflows), so + there is no finalSignatureDigest field. 
Activity identity is the hosting + app's SPIFFE identity; a compromised app can attest only to activities + it hosts, not to activities hosted on other apps. + """ + + DESCRIPTOR: _descriptor.Descriptor + + PARENTINSTANCEID_FIELD_NUMBER: _builtins.int + PARENTTASKSCHEDULEDID_FIELD_NUMBER: _builtins.int + ACTIVITYNAME_FIELD_NUMBER: _builtins.int + IODIGEST_FIELD_NUMBER: _builtins.int + SIGNERCERTDIGEST_FIELD_NUMBER: _builtins.int + TERMINALSTATUS_FIELD_NUMBER: _builtins.int + CANONICALSPECVERSION_FIELD_NUMBER: _builtins.int + parentInstanceId: _builtins.str + """Parent workflow instance ID that scheduled the activity.""" + parentTaskScheduledId: _builtins.int + """taskScheduledId from the parent's TaskScheduledEvent. Unique within + the parent instance. + """ + activityName: _builtins.str + """Activity name from the parent's TaskScheduledEvent. Explicit because + no separate creation event binds it in the parent's history the way + ChildWorkflowInstanceCreatedEvent does for child workflows. + """ + ioDigest: _builtins.bytes + """sha256 commitment to this invocation's input and output. See the + canonical byte serialization spec at the top of this file. Use + terminalStatus (ACTIVITY_TERMINAL_STATUS_COMPLETED or _FAILED) to + select the output serialization rule. + """ + signerCertDigest: _builtins.bytes + """sha256 of the DER-encoded X.509 certificate chain bytes of the + activity executor's signer. Same semantics and storage behavior as + ChildCompletionAttestationPayload.signerCertDigest. + """ + terminalStatus: Global___ActivityTerminalStatus.ValueType + """Terminal state of the activity at the moment of attestation.""" + canonicalSpecVersion: _builtins.int + """Version of the canonical byte serialization spec used to compute + ioDigest. See the "Versioning" section of the spec block at the top + of this file. Verifiers that don't recognize the value reject the + attestation rather than risk a silent digest mismatch. Current + value: 1. 
+ """ + def __init__( + self, + *, + parentInstanceId: _builtins.str = ..., + parentTaskScheduledId: _builtins.int = ..., + activityName: _builtins.str = ..., + ioDigest: _builtins.bytes = ..., + signerCertDigest: _builtins.bytes = ..., + terminalStatus: Global___ActivityTerminalStatus.ValueType = ..., + canonicalSpecVersion: _builtins.int = ..., + ) -> None: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["activityName", b"activityName", "canonicalSpecVersion", b"canonicalSpecVersion", "ioDigest", b"ioDigest", "parentInstanceId", b"parentInstanceId", "parentTaskScheduledId", b"parentTaskScheduledId", "signerCertDigest", b"signerCertDigest", "terminalStatus", b"terminalStatus"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + +Global___ActivityCompletionAttestationPayload: _TypeAlias = ActivityCompletionAttestationPayload # noqa: Y015 + +@_typing.final +class ActivityCompletionAttestation(_message.Message): + """Signed wrapper around ActivityCompletionAttestationPayload.""" + + DESCRIPTOR: _descriptor.Descriptor + + PAYLOAD_FIELD_NUMBER: _builtins.int + SIGNATURE_FIELD_NUMBER: _builtins.int + payload: _builtins.bytes + """Deterministically serialized form of + ActivityCompletionAttestationPayload produced once by the signer. + Opaque bytes thereafter; receivers, storage layers, and verifiers + never re-marshal. + """ + signature: _builtins.bytes + """Cryptographic signature over sha256(payload).""" + def __init__( + self, + *, + payload: _builtins.bytes = ..., + signature: _builtins.bytes = ..., + ) -> None: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["payload", b"payload", "signature", b"signature"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
+ +Global___ActivityCompletionAttestation: _TypeAlias = ActivityCompletionAttestation # noqa: Y015 + +@_typing.final +class ExternalSigningCertificate(_message.Message): + """A foreign signer's X.509 certificate; one belonging to another workflow + instance or activity executor whose attestations this workflow has + received. Stored once per unique digest and referenced by digest from + any attestation embedded in history. Stored as individual actor state + keys: ext-sigcert-000000, ext-sigcert-000001, etc. + + Lifecycle mirrors SigningCertificate (monotonically appended within a + run, cleared on ContinueAsNew and instance purge, tracked by + BackendWorkflowStateMetadata.externalSigningCertificateLength). Dedup + within a run is performed by in-memory digest→index lookup built at + load time. + """ + + DESCRIPTOR: _descriptor.Descriptor + + DIGEST_FIELD_NUMBER: _builtins.int + CERTIFICATE_FIELD_NUMBER: _builtins.int + digest: _builtins.bytes + """sha256 of the DER-encoded X.509 certificate chain bytes (the value + in `certificate` below). Also the primary lookup key used by + attestations' signerCertDigest fields. Stored explicitly so + load-time index construction and post-load integrity checks do not + have to re-hash every entry. + """ + certificate: _builtins.bytes + """Same byte format as SigningCertificate.certificate: DER-encoded + X.509 chain, leaf first, intermediates concatenated. + """ + def __init__( + self, + *, + digest: _builtins.bytes = ..., + certificate: _builtins.bytes = ..., + ) -> None: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["certificate", b"certificate", "digest", b"digest"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
+ +Global___ExternalSigningCertificate: _TypeAlias = ExternalSigningCertificate # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/attestation_pb2_grpc.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/attestation_pb2_grpc.py new file mode 100644 index 00000000..4dbee3e0 --- /dev/null +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/attestation_pb2_grpc.py @@ -0,0 +1,24 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc +import warnings + + +GRPC_GENERATED_VERSION = '1.76.0' +GRPC_VERSION = grpc.__version__ +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + raise RuntimeError( + f'The grpc package installed is at version {GRPC_VERSION},' + + ' but the generated code in attestation_pb2_grpc.py depends on' + + f' grpcio>={GRPC_GENERATED_VERSION}.' + + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' 
+ ) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.py index c969a3d2..26643c52 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.py @@ -28,7 +28,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15\x62\x61\x63kend_service.proto\x12\x1d\x64urabletask.protos.backend.v1\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"T\n\x0f\x41\x64\x64\x45ventRequest\x12#\n\x08instance\x18\x01 \x01(\x0b\x32\x11.WorkflowInstance\x12\x1c\n\x05\x65vent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\x12\n\x10\x41\x64\x64\x45ventResponse\"`\n\x1f\x43ompleteActivityWorkItemRequest\x12\x17\n\x0f\x63ompletionToken\x18\x01 \x01(\t\x12$\n\rresponseEvent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\"\n CompleteActivityWorkItemResponse\"\xa4\x03\n\x1f\x43ompleteWorkflowWorkItemRequest\x12\x17\n\x0f\x63ompletionToken\x18\x01 \x01(\t\x12#\n\x08instance\x18\x02 \x01(\x0b\x32\x11.WorkflowInstance\x12+\n\rruntimeStatus\x18\x03 \x01(\x0e\x32\x14.OrchestrationStatus\x12\x32\n\x0c\x63ustomStatus\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12!\n\nnewHistory\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12\x1f\n\x08newTasks\x18\x06 \x03(\x0b\x32\r.HistoryEvent\x12 \n\tnewTimers\x18\x07 \x03(\x0b\x32\r.HistoryEvent\x12\x43\n\x0bnewMessages\x18\x08 \x03(\x0b\x32..durabletask.protos.backend.v1.WorkflowMessage\x12\x37\n\x12numEventsProcessed\x18\t \x01(\x0b\x32\x1b.google.protobuf.Int32Value\"\"\n CompleteWorkflowWorkItemResponse\"T\n\x0fWorkflowMessage\x12#\n\x08instance\x18\x01 \x01(\x0b\x32\x11.WorkflowInstance\x12\x1c\n\x05\x65vent\x18\x02 
\x01(\x0b\x32\r.HistoryEvent\"\x9c\x01\n\x14\x42\x61\x63kendWorkflowState\x12\x1c\n\x05inbox\x18\x01 \x03(\x0b\x32\r.HistoryEvent\x12\x1e\n\x07history\x18\x02 \x03(\x0b\x32\r.HistoryEvent\x12\x32\n\x0c\x63ustomStatus\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x12\n\ngeneration\x18\x04 \x01(\x04\"P\n\x1d\x43reateWorkflowInstanceRequest\x12!\n\nstartEvent\x18\x01 \x01(\x0b\x32\r.HistoryEventJ\x04\x08\x02\x10\x03R\x06policy\"\xca\x03\n\x10WorkflowMetadata\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\rruntimeStatus\x18\x03 \x01(\x0e\x32\x14.OrchestrationStatus\x12-\n\tcreatedAt\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rlastUpdatedAt\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x05input\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x06output\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x32\n\x0c\x63ustomStatus\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\t \x01(\x0b\x32\x13.TaskFailureDetails\x12/\n\x0b\x63ompletedAt\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x18\n\x10parentInstanceId\x18\x0b \x01(\t\"\x99\x01\n\x1c\x42\x61\x63kendWorkflowStateMetadata\x12\x13\n\x0binboxLength\x18\x01 \x01(\x04\x12\x15\n\rhistoryLength\x18\x02 \x01(\x04\x12\x12\n\ngeneration\x18\x03 \x01(\x04\x12\x17\n\x0fsignatureLength\x18\x04 \x01(\x04\x12 \n\x18signingCertificateLength\x18\x05 \x01(\x04\")\n\x12SigningCertificate\x12\x13\n\x0b\x63\x65rtificate\x18\x01 \x01(\x0c\"\xc4\x01\n\x10HistorySignature\x12\x17\n\x0fstartEventIndex\x18\x01 \x01(\x04\x12\x12\n\neventCount\x18\x02 \x01(\x04\x12$\n\x17previousSignatureDigest\x18\x03 \x01(\x0cH\x00\x88\x01\x01\x12\x14\n\x0c\x65ventsDigest\x18\x04 \x01(\x0c\x12\x18\n\x10\x63\x65rtificateIndex\x18\x05 \x01(\x04\x12\x11\n\tsignature\x18\x06 \x01(\x0c\x42\x1a\n\x18_previousSignatureDigest\"E\n\x0c\x44urableTimer\x12!\n\ntimerEvent\x18\x01 
\x01(\x0b\x32\r.HistoryEvent\x12\x12\n\ngeneration\x18\x02 \x01(\x04\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15\x62\x61\x63kend_service.proto\x12\x1d\x64urabletask.protos.backend.v1\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"T\n\x0f\x41\x64\x64\x45ventRequest\x12#\n\x08instance\x18\x01 \x01(\x0b\x32\x11.WorkflowInstance\x12\x1c\n\x05\x65vent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\x12\n\x10\x41\x64\x64\x45ventResponse\"`\n\x1f\x43ompleteActivityWorkItemRequest\x12\x17\n\x0f\x63ompletionToken\x18\x01 \x01(\t\x12$\n\rresponseEvent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\"\n CompleteActivityWorkItemResponse\"\xa4\x03\n\x1f\x43ompleteWorkflowWorkItemRequest\x12\x17\n\x0f\x63ompletionToken\x18\x01 \x01(\t\x12#\n\x08instance\x18\x02 \x01(\x0b\x32\x11.WorkflowInstance\x12+\n\rruntimeStatus\x18\x03 \x01(\x0e\x32\x14.OrchestrationStatus\x12\x32\n\x0c\x63ustomStatus\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12!\n\nnewHistory\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12\x1f\n\x08newTasks\x18\x06 \x03(\x0b\x32\r.HistoryEvent\x12 \n\tnewTimers\x18\x07 \x03(\x0b\x32\r.HistoryEvent\x12\x43\n\x0bnewMessages\x18\x08 \x03(\x0b\x32..durabletask.protos.backend.v1.WorkflowMessage\x12\x37\n\x12numEventsProcessed\x18\t \x01(\x0b\x32\x1b.google.protobuf.Int32Value\"\"\n CompleteWorkflowWorkItemResponse\"T\n\x0fWorkflowMessage\x12#\n\x08instance\x18\x01 \x01(\x0b\x32\x11.WorkflowInstance\x12\x1c\n\x05\x65vent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\x9c\x01\n\x14\x42\x61\x63kendWorkflowState\x12\x1c\n\x05inbox\x18\x01 \x03(\x0b\x32\r.HistoryEvent\x12\x1e\n\x07history\x18\x02 \x03(\x0b\x32\r.HistoryEvent\x12\x32\n\x0c\x63ustomStatus\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x12\n\ngeneration\x18\x04 
\x01(\x04\"\x83\x01\n\x12\x41\x63tivityInvocation\x12#\n\x0chistoryEvent\x18\x01 \x01(\x0b\x32\r.HistoryEvent\x12\x32\n\x11propagatedHistory\x18\x02 \x01(\x0b\x32\x12.PropagatedHistoryH\x00\x88\x01\x01\x42\x14\n\x12_propagatedHistory\"\x9a\x01\n\x1d\x43reateWorkflowInstanceRequest\x12!\n\nstartEvent\x18\x01 \x01(\x0b\x32\r.HistoryEvent\x12\x32\n\x11propagatedHistory\x18\x03 \x01(\x0b\x32\x12.PropagatedHistoryH\x00\x88\x01\x01\x42\x14\n\x12_propagatedHistoryJ\x04\x08\x02\x10\x03R\x06policy\"\x94\x05\n\x10WorkflowMetadata\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\rruntimeStatus\x18\x03 \x01(\x0e\x32\x14.OrchestrationStatus\x12-\n\tcreatedAt\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rlastUpdatedAt\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x05input\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x06output\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x32\n\x0c\x63ustomStatus\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\t \x01(\x0b\x32\x13.TaskFailureDetails\x12/\n\x0b\x63ompletedAt\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x18\n\x10parentInstanceId\x18\x0b \x01(\t\x12\x32\n\x07version\x18\x0c \x01(\x0b\x32\x1c.google.protobuf.StringValueH\x00\x88\x01\x01\x12\x36\n\x0bparentAppId\x18\r \x01(\x0b\x32\x1c.google.protobuf.StringValueH\x01\x88\x01\x01\x12\x32\n\tstartedAt\x18\x0e \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x02\x88\x01\x01\x42\n\n\x08_versionB\x0e\n\x0c_parentAppIdB\x0c\n\n_startedAt\"\xc3\x01\n\x1c\x42\x61\x63kendWorkflowStateMetadata\x12\x13\n\x0binboxLength\x18\x01 \x01(\x04\x12\x15\n\rhistoryLength\x18\x02 \x01(\x04\x12\x12\n\ngeneration\x18\x03 \x01(\x04\x12\x17\n\x0fsignatureLength\x18\x04 \x01(\x04\x12 \n\x18signingCertificateLength\x18\x05 \x01(\x04\x12(\n externalSigningCertificateLength\x18\x06 \x01(\x04\")\n\x12SigningCertificate\x12\x13\n\x0b\x63\x65rtificate\x18\x01 
\x01(\x0c\"\xc4\x01\n\x10HistorySignature\x12\x17\n\x0fstartEventIndex\x18\x01 \x01(\x04\x12\x12\n\neventCount\x18\x02 \x01(\x04\x12$\n\x17previousSignatureDigest\x18\x03 \x01(\x0cH\x00\x88\x01\x01\x12\x14\n\x0c\x65ventsDigest\x18\x04 \x01(\x0c\x12\x18\n\x10\x63\x65rtificateIndex\x18\x05 \x01(\x04\x12\x11\n\tsignature\x18\x06 \x01(\x0c\x42\x1a\n\x18_previousSignatureDigest\"E\n\x0c\x44urableTimer\x12!\n\ntimerEvent\x18\x01 \x01(\x0b\x32\r.HistoryEvent\x12\x12\n\ngeneration\x18\x02 \x01(\x04\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -52,16 +52,18 @@ _globals['_WORKFLOWMESSAGE']._serialized_end=947 _globals['_BACKENDWORKFLOWSTATE']._serialized_start=950 _globals['_BACKENDWORKFLOWSTATE']._serialized_end=1106 - _globals['_CREATEWORKFLOWINSTANCEREQUEST']._serialized_start=1108 - _globals['_CREATEWORKFLOWINSTANCEREQUEST']._serialized_end=1188 - _globals['_WORKFLOWMETADATA']._serialized_start=1191 - _globals['_WORKFLOWMETADATA']._serialized_end=1649 - _globals['_BACKENDWORKFLOWSTATEMETADATA']._serialized_start=1652 - _globals['_BACKENDWORKFLOWSTATEMETADATA']._serialized_end=1805 - _globals['_SIGNINGCERTIFICATE']._serialized_start=1807 - _globals['_SIGNINGCERTIFICATE']._serialized_end=1848 - _globals['_HISTORYSIGNATURE']._serialized_start=1851 - _globals['_HISTORYSIGNATURE']._serialized_end=2047 - _globals['_DURABLETIMER']._serialized_start=2049 - _globals['_DURABLETIMER']._serialized_end=2118 + _globals['_ACTIVITYINVOCATION']._serialized_start=1109 + _globals['_ACTIVITYINVOCATION']._serialized_end=1240 + _globals['_CREATEWORKFLOWINSTANCEREQUEST']._serialized_start=1243 + _globals['_CREATEWORKFLOWINSTANCEREQUEST']._serialized_end=1397 + _globals['_WORKFLOWMETADATA']._serialized_start=1400 + _globals['_WORKFLOWMETADATA']._serialized_end=2060 + _globals['_BACKENDWORKFLOWSTATEMETADATA']._serialized_start=2063 + 
_globals['_BACKENDWORKFLOWSTATEMETADATA']._serialized_end=2258 + _globals['_SIGNINGCERTIFICATE']._serialized_start=2260 + _globals['_SIGNINGCERTIFICATE']._serialized_end=2301 + _globals['_HISTORYSIGNATURE']._serialized_start=2304 + _globals['_HISTORYSIGNATURE']._serialized_end=2500 + _globals['_DURABLETIMER']._serialized_start=2502 + _globals['_DURABLETIMER']._serialized_end=2571 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.pyi index 7b1a84de..08f6d2d5 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.pyi +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.pyi @@ -239,22 +239,63 @@ class BackendWorkflowState(_message.Message): Global___BackendWorkflowState: _TypeAlias = BackendWorkflowState # noqa: Y015 +@_typing.final +class ActivityInvocation(_message.Message): + """ActivityInvocation wraps a TaskScheduled HistoryEvent with optional + propagated history for delivery to an activity actor. + """ + + DESCRIPTOR: _descriptor.Descriptor + + HISTORYEVENT_FIELD_NUMBER: _builtins.int + PROPAGATEDHISTORY_FIELD_NUMBER: _builtins.int + @_builtins.property + def historyEvent(self) -> _history_events_pb2.HistoryEvent: ... + @_builtins.property + def propagatedHistory(self) -> _history_events_pb2.PropagatedHistory: + """Propagated history from the calling workflow.""" + + def __init__( + self, + *, + historyEvent: _history_events_pb2.HistoryEvent | None = ..., + propagatedHistory: _history_events_pb2.PropagatedHistory | None = ..., + ) -> None: ... + _HasFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "historyEvent", b"historyEvent", "propagatedHistory", b"propagatedHistory"] # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
+ _ClearFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "historyEvent", b"historyEvent", "propagatedHistory", b"propagatedHistory"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__propagatedHistory: _TypeAlias = _typing.Literal["propagatedHistory"] # noqa: Y015 + _WhichOneofArgType__propagatedHistory: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory"] # noqa: Y015 + def WhichOneof(self, oneof_group: _WhichOneofArgType__propagatedHistory) -> _WhichOneofReturnType__propagatedHistory | None: ... + +Global___ActivityInvocation: _TypeAlias = ActivityInvocation # noqa: Y015 + @_typing.final class CreateWorkflowInstanceRequest(_message.Message): DESCRIPTOR: _descriptor.Descriptor STARTEVENT_FIELD_NUMBER: _builtins.int + PROPAGATEDHISTORY_FIELD_NUMBER: _builtins.int @_builtins.property def startEvent(self) -> _history_events_pb2.HistoryEvent: ... + @_builtins.property + def propagatedHistory(self) -> _history_events_pb2.PropagatedHistory: + """Propagated history from the parent workflow.""" + def __init__( self, *, startEvent: _history_events_pb2.HistoryEvent | None = ..., + propagatedHistory: _history_events_pb2.PropagatedHistory | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["startEvent", b"startEvent"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "propagatedHistory", b"propagatedHistory", "startEvent", b"startEvent"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["startEvent", b"startEvent"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "propagatedHistory", b"propagatedHistory", "startEvent", b"startEvent"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
+ _WhichOneofReturnType__propagatedHistory: _TypeAlias = _typing.Literal["propagatedHistory"] # noqa: Y015 + _WhichOneofArgType__propagatedHistory: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory"] # noqa: Y015 + def WhichOneof(self, oneof_group: _WhichOneofArgType__propagatedHistory) -> _WhichOneofReturnType__propagatedHistory | None: ... Global___CreateWorkflowInstanceRequest: _TypeAlias = CreateWorkflowInstanceRequest # noqa: Y015 @@ -273,6 +314,9 @@ class WorkflowMetadata(_message.Message): FAILUREDETAILS_FIELD_NUMBER: _builtins.int COMPLETEDAT_FIELD_NUMBER: _builtins.int PARENTINSTANCEID_FIELD_NUMBER: _builtins.int + VERSION_FIELD_NUMBER: _builtins.int + PARENTAPPID_FIELD_NUMBER: _builtins.int + STARTEDAT_FIELD_NUMBER: _builtins.int instanceId: _builtins.str name: _builtins.str runtimeStatus: _orchestration_pb2.OrchestrationStatus.ValueType @@ -291,6 +335,12 @@ class WorkflowMetadata(_message.Message): def failureDetails(self) -> _orchestration_pb2.TaskFailureDetails: ... @_builtins.property def completedAt(self) -> _timestamp_pb2.Timestamp: ... + @_builtins.property + def version(self) -> _wrappers_pb2.StringValue: ... + @_builtins.property + def parentAppId(self) -> _wrappers_pb2.StringValue: ... + @_builtins.property + def startedAt(self) -> _timestamp_pb2.Timestamp: ... def __init__( self, *, @@ -305,11 +355,26 @@ class WorkflowMetadata(_message.Message): failureDetails: _orchestration_pb2.TaskFailureDetails | None = ..., completedAt: _timestamp_pb2.Timestamp | None = ..., parentInstanceId: _builtins.str = ..., + version: _wrappers_pb2.StringValue | None = ..., + parentAppId: _wrappers_pb2.StringValue | None = ..., + startedAt: _timestamp_pb2.Timestamp | None = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["completedAt", b"completedAt", "createdAt", b"createdAt", "customStatus", b"customStatus", "failureDetails", b"failureDetails", "input", b"input", "lastUpdatedAt", b"lastUpdatedAt", "output", b"output"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_parentAppId", b"_parentAppId", "_startedAt", b"_startedAt", "_version", b"_version", "completedAt", b"completedAt", "createdAt", b"createdAt", "customStatus", b"customStatus", "failureDetails", b"failureDetails", "input", b"input", "lastUpdatedAt", b"lastUpdatedAt", "output", b"output", "parentAppId", b"parentAppId", "startedAt", b"startedAt", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["completedAt", b"completedAt", "createdAt", b"createdAt", "customStatus", b"customStatus", "failureDetails", b"failureDetails", "input", b"input", "instanceId", b"instanceId", "lastUpdatedAt", b"lastUpdatedAt", "name", b"name", "output", b"output", "parentInstanceId", b"parentInstanceId", "runtimeStatus", b"runtimeStatus"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_parentAppId", b"_parentAppId", "_startedAt", b"_startedAt", "_version", b"_version", "completedAt", b"completedAt", "createdAt", b"createdAt", "customStatus", b"customStatus", "failureDetails", b"failureDetails", "input", b"input", "instanceId", b"instanceId", "lastUpdatedAt", b"lastUpdatedAt", "name", b"name", "output", b"output", "parentAppId", b"parentAppId", "parentInstanceId", b"parentInstanceId", "runtimeStatus", b"runtimeStatus", "startedAt", b"startedAt", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
+ _WhichOneofReturnType__parentAppId: _TypeAlias = _typing.Literal["parentAppId"] # noqa: Y015 + _WhichOneofArgType__parentAppId: _TypeAlias = _typing.Literal["_parentAppId", b"_parentAppId"] # noqa: Y015 + _WhichOneofReturnType__startedAt: _TypeAlias = _typing.Literal["startedAt"] # noqa: Y015 + _WhichOneofArgType__startedAt: _TypeAlias = _typing.Literal["_startedAt", b"_startedAt"] # noqa: Y015 + _WhichOneofReturnType__version: _TypeAlias = _typing.Literal["version"] # noqa: Y015 + _WhichOneofArgType__version: _TypeAlias = _typing.Literal["_version", b"_version"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__parentAppId) -> _WhichOneofReturnType__parentAppId | None: ... + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__startedAt) -> _WhichOneofReturnType__startedAt | None: ... + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__version) -> _WhichOneofReturnType__version | None: ... Global___WorkflowMetadata: _TypeAlias = WorkflowMetadata # noqa: Y015 @@ -322,6 +387,7 @@ class BackendWorkflowStateMetadata(_message.Message): GENERATION_FIELD_NUMBER: _builtins.int SIGNATURELENGTH_FIELD_NUMBER: _builtins.int SIGNINGCERTIFICATELENGTH_FIELD_NUMBER: _builtins.int + EXTERNALSIGNINGCERTIFICATELENGTH_FIELD_NUMBER: _builtins.int inboxLength: _builtins.int historyLength: _builtins.int generation: _builtins.int @@ -329,6 +395,14 @@ class BackendWorkflowStateMetadata(_message.Message): """Number of HistorySignature entries stored (signature-NNNNNN keys).""" signingCertificateLength: _builtins.int """Number of SigningCertificate entries stored (sigcert-NNNNNN keys).""" + externalSigningCertificateLength: _builtins.int + """Number of ExternalSigningCertificate entries stored + (ext-sigcert-NNNNNN keys). 
Same lifecycle as signingCertificateLength: + monotonically grows within a run as new foreign signer certificates + are absorbed from incoming attestations, zeroed on ContinueAsNew, + cleared on instance purge. Subject to the same maxStateEntries + tampering bound. + """ def __init__( self, *, @@ -337,8 +411,9 @@ class BackendWorkflowStateMetadata(_message.Message): generation: _builtins.int = ..., signatureLength: _builtins.int = ..., signingCertificateLength: _builtins.int = ..., + externalSigningCertificateLength: _builtins.int = ..., ) -> None: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["generation", b"generation", "historyLength", b"historyLength", "inboxLength", b"inboxLength", "signatureLength", b"signatureLength", "signingCertificateLength", b"signingCertificateLength"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["externalSigningCertificateLength", b"externalSigningCertificateLength", "generation", b"generation", "historyLength", b"historyLength", "inboxLength", b"inboxLength", "signatureLength", b"signatureLength", "signingCertificateLength", b"signingCertificateLength"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
Global___BackendWorkflowStateMetadata: _TypeAlias = BackendWorkflowStateMetadata # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/helpers.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/helpers.py index 70c41c60..ae322dd6 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/helpers.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/helpers.py @@ -197,15 +197,19 @@ def new_schedule_task_action( encoded_input: Optional[str], router: Optional[pb.TaskRouter] = None, task_execution_id: str = '', + propagation_scope: Optional[int] = None, ) -> pb.WorkflowAction: + schedule = pb.ScheduleTaskAction( + name=name, + input=get_string_value(encoded_input), + router=router, + taskExecutionId=task_execution_id, + ) + if propagation_scope is not None: + schedule.historyPropagationScope = propagation_scope return pb.WorkflowAction( id=id, - scheduleTask=pb.ScheduleTaskAction( - name=name, - input=get_string_value(encoded_input), - router=router, - taskExecutionId=task_execution_id, - ), + scheduleTask=schedule, router=router, ) @@ -222,15 +226,19 @@ def new_create_child_workflow_action( instance_id: Optional[str], encoded_input: Optional[str], router: Optional[pb.TaskRouter] = None, + propagation_scope: Optional[int] = None, ) -> pb.WorkflowAction: + child = pb.CreateChildWorkflowAction( + name=name, + instanceId=instance_id, + input=get_string_value(encoded_input), + router=router, + ) + if propagation_scope is not None: + child.historyPropagationScope = propagation_scope return pb.WorkflowAction( id=id, - createChildWorkflow=pb.CreateChildWorkflowAction( - name=name, - instanceId=instance_id, - input=get_string_value(encoded_input), - router=router, - ), + createChildWorkflow=child, router=router, ) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.py 
index feb7e313..707c93d4 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.py @@ -23,11 +23,12 @@ from dapr.ext.workflow._durabletask.internal import orchestration_pb2 as orchestration__pb2 +from dapr.ext.workflow._durabletask.internal import attestation_pb2 as attestation__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x14history_events.proto\x1a\x13orchestration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xd6\x03\n\x15\x45xecutionStartedEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x10workflowInstance\x18\x04 \x01(\x0b\x32\x11.WorkflowInstance\x12+\n\x0eparentInstance\x18\x05 \x01(\x0b\x32\x13.ParentInstanceInfo\x12;\n\x17scheduledStartTimestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12)\n\x12parentTraceContext\x18\x07 \x01(\x0b\x32\r.TraceContext\x12\x34\n\x0eworkflowSpanID\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x04tags\x18\t \x03(\x0b\x32 .ExecutionStartedEvent.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa2\x01\n\x17\x45xecutionCompletedEvent\x12,\n\x0eworkflowStatus\x18\x01 \x01(\x0e\x32\x14.OrchestrationStatus\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x03 \x01(\x0b\x32\x13.TaskFailureDetails\"X\n\x18\x45xecutionTerminatedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x0f\n\x07recurse\x18\x02 \x01(\x08\"\x9e\x02\n\x12TaskScheduledEvent\x12\x0c\n\x04name\x18\x01 
\x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x12parentTraceContext\x18\x04 \x01(\x0b\x32\r.TraceContext\x12\x17\n\x0ftaskExecutionId\x18\x05 \x01(\t\x12>\n\x17rerunParentInstanceInfo\x18\x06 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x00\x88\x01\x01\x42\x1a\n\x18_rerunParentInstanceInfo\"t\n\x12TaskCompletedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x17\n\x0ftaskExecutionId\x18\x03 \x01(\t\"p\n\x0fTaskFailedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12+\n\x0e\x66\x61ilureDetails\x18\x02 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x17\n\x0ftaskExecutionId\x18\x03 \x01(\t\"\xa8\x02\n!ChildWorkflowInstanceCreatedEvent\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x12parentTraceContext\x18\x05 \x01(\x0b\x32\r.TraceContext\x12>\n\x17rerunParentInstanceInfo\x18\x06 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x00\x88\x01\x01\x42\x1a\n\x18_rerunParentInstanceInfo\"l\n#ChildWorkflowInstanceCompletedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"h\n ChildWorkflowInstanceFailedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12+\n\x0e\x66\x61ilureDetails\x18\x02 \x01(\x0b\x32\x13.TaskFailureDetails\"\x18\n\x16TimerOriginCreateTimer\"(\n\x18TimerOriginExternalEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\"3\n\x18TimerOriginActivityRetry\x12\x17\n\x0ftaskExecutionId\x18\x01 \x01(\t\"3\n\x1dTimerOriginChildWorkflowRetry\x12\x12\n\ninstanceId\x18\x01 \x01(\t\"\x97\x03\n\x11TimerCreatedEvent\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x11\n\x04name\x18\x02 
\x01(\tH\x01\x88\x01\x01\x12>\n\x17rerunParentInstanceInfo\x18\x03 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x02\x88\x01\x01\x12.\n\x0b\x63reateTimer\x18\x04 \x01(\x0b\x32\x17.TimerOriginCreateTimerH\x00\x12\x32\n\rexternalEvent\x18\x05 \x01(\x0b\x32\x19.TimerOriginExternalEventH\x00\x12\x32\n\ractivityRetry\x18\x06 \x01(\x0b\x32\x19.TimerOriginActivityRetryH\x00\x12<\n\x12\x63hildWorkflowRetry\x18\x07 \x01(\x0b\x32\x1e.TimerOriginChildWorkflowRetryH\x00\x42\x08\n\x06originB\x07\n\x05_nameB\x1a\n\x18_rerunParentInstanceInfo\"N\n\x0fTimerFiredEvent\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07timerId\x18\x02 \x01(\x05\"J\n\x14WorkflowStartedEvent\x12&\n\x07version\x18\x01 \x01(\x0b\x32\x10.WorkflowVersionH\x00\x88\x01\x01\x42\n\n\x08_version\"\x18\n\x16WorkflowCompletedEvent\"_\n\x0e\x45ventSentEvent\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"M\n\x10\x45ventRaisedEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05input\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"A\n\x12\x43ontinueAsNewEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"F\n\x17\x45xecutionSuspendedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"D\n\x15\x45xecutionResumedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"a\n\x15\x45xecutionStalledEvent\x12\x1e\n\x06reason\x18\x01 \x01(\x0e\x32\x0e.StalledReason\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_description\"\xa8\t\n\x0cHistoryEvent\x12\x0f\n\x07\x65ventId\x18\x01 \x01(\x05\x12-\n\ttimestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x10\x65xecutionStarted\x18\x03 \x01(\x0b\x32\x16.ExecutionStartedEventH\x00\x12\x36\n\x12\x65xecutionCompleted\x18\x04 \x01(\x0b\x32\x18.ExecutionCompletedEventH\x00\x12\x38\n\x13\x65xecutionTerminated\x18\x05 
\x01(\x0b\x32\x19.ExecutionTerminatedEventH\x00\x12,\n\rtaskScheduled\x18\x06 \x01(\x0b\x32\x13.TaskScheduledEventH\x00\x12,\n\rtaskCompleted\x18\x07 \x01(\x0b\x32\x13.TaskCompletedEventH\x00\x12&\n\ntaskFailed\x18\x08 \x01(\x0b\x32\x10.TaskFailedEventH\x00\x12J\n\x1c\x63hildWorkflowInstanceCreated\x18\t \x01(\x0b\x32\".ChildWorkflowInstanceCreatedEventH\x00\x12N\n\x1e\x63hildWorkflowInstanceCompleted\x18\n \x01(\x0b\x32$.ChildWorkflowInstanceCompletedEventH\x00\x12H\n\x1b\x63hildWorkflowInstanceFailed\x18\x0b \x01(\x0b\x32!.ChildWorkflowInstanceFailedEventH\x00\x12*\n\x0ctimerCreated\x18\x0c \x01(\x0b\x32\x12.TimerCreatedEventH\x00\x12&\n\ntimerFired\x18\r \x01(\x0b\x32\x10.TimerFiredEventH\x00\x12\x30\n\x0fworkflowStarted\x18\x0e \x01(\x0b\x32\x15.WorkflowStartedEventH\x00\x12\x34\n\x11workflowCompleted\x18\x0f \x01(\x0b\x32\x17.WorkflowCompletedEventH\x00\x12$\n\teventSent\x18\x10 \x01(\x0b\x32\x0f.EventSentEventH\x00\x12(\n\x0b\x65ventRaised\x18\x11 \x01(\x0b\x32\x11.EventRaisedEventH\x00\x12,\n\rcontinueAsNew\x18\x14 \x01(\x0b\x32\x13.ContinueAsNewEventH\x00\x12\x36\n\x12\x65xecutionSuspended\x18\x15 \x01(\x0b\x32\x18.ExecutionSuspendedEventH\x00\x12\x32\n\x10\x65xecutionResumed\x18\x16 \x01(\x0b\x32\x16.ExecutionResumedEventH\x00\x12\x32\n\x10\x65xecutionStalled\x18\x1f \x01(\x0b\x32\x16.ExecutionStalledEventH\x00\x12 \n\x06router\x18\x1e \x01(\x0b\x32\x0b.TaskRouterH\x01\x88\x01\x01\x42\x0b\n\teventTypeB\t\n\x07_routerJ\x04\x08\x12\x10\x13J\x04\x08\x13\x10\x14J\x04\x08\x17\x10\x18J\x04\x08\x18\x10\x19J\x04\x08\x19\x10\x1aJ\x04\x08\x1a\x10\x1bJ\x04\x08\x1b\x10\x1cJ\x04\x08\x1c\x10\x1dJ\x04\x08\x1d\x10\x1e\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x14history_events.proto\x1a\x13orchestration.proto\x1a\x11\x61ttestation.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xd6\x03\n\x15\x45xecutionStartedEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x10workflowInstance\x18\x04 \x01(\x0b\x32\x11.WorkflowInstance\x12+\n\x0eparentInstance\x18\x05 \x01(\x0b\x32\x13.ParentInstanceInfo\x12;\n\x17scheduledStartTimestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12)\n\x12parentTraceContext\x18\x07 \x01(\x0b\x32\r.TraceContext\x12\x34\n\x0eworkflowSpanID\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x04tags\x18\t \x03(\x0b\x32 .ExecutionStartedEvent.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa2\x01\n\x17\x45xecutionCompletedEvent\x12,\n\x0eworkflowStatus\x18\x01 \x01(\x0e\x32\x14.OrchestrationStatus\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x03 \x01(\x0b\x32\x13.TaskFailureDetails\"X\n\x18\x45xecutionTerminatedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x0f\n\x07recurse\x18\x02 \x01(\x08\"\xfa\x02\n\x12TaskScheduledEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x12parentTraceContext\x18\x04 \x01(\x0b\x32\r.TraceContext\x12\x17\n\x0ftaskExecutionId\x18\x05 \x01(\t\x12>\n\x17rerunParentInstanceInfo\x18\x06 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x00\x88\x01\x01\x12>\n\x17historyPropagationScope\x18\x07 
\x01(\x0e\x32\x18.HistoryPropagationScopeH\x01\x88\x01\x01\x42\x1a\n\x18_rerunParentInstanceInfoB\x1a\n\x18_historyPropagationScope\"\xf4\x01\n\x12TaskCompletedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x17\n\x0ftaskExecutionId\x18\x03 \x01(\t\x12\x38\n\x0b\x61ttestation\x18\x04 \x01(\x0b\x32\x1e.ActivityCompletionAttestationH\x00\x88\x01\x01\x12\x1e\n\x11signerCertificate\x18\x05 \x01(\x0cH\x01\x88\x01\x01\x42\x0e\n\x0c_attestationB\x14\n\x12_signerCertificate\"\xf0\x01\n\x0fTaskFailedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12+\n\x0e\x66\x61ilureDetails\x18\x02 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x17\n\x0ftaskExecutionId\x18\x03 \x01(\t\x12\x38\n\x0b\x61ttestation\x18\x04 \x01(\x0b\x32\x1e.ActivityCompletionAttestationH\x00\x88\x01\x01\x12\x1e\n\x11signerCertificate\x18\x05 \x01(\x0cH\x01\x88\x01\x01\x42\x0e\n\x0c_attestationB\x14\n\x12_signerCertificate\"\x84\x03\n!ChildWorkflowInstanceCreatedEvent\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x12parentTraceContext\x18\x05 \x01(\x0b\x32\r.TraceContext\x12>\n\x17rerunParentInstanceInfo\x18\x06 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x00\x88\x01\x01\x12>\n\x17historyPropagationScope\x18\x07 \x01(\x0e\x32\x18.HistoryPropagationScopeH\x01\x88\x01\x01\x42\x1a\n\x18_rerunParentInstanceInfoB\x1a\n\x18_historyPropagationScope\"\xe9\x01\n#ChildWorkflowInstanceCompletedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x35\n\x0b\x61ttestation\x18\x03 \x01(\x0b\x32\x1b.ChildCompletionAttestationH\x00\x88\x01\x01\x12\x1e\n\x11signerCertificate\x18\x04 \x01(\x0cH\x01\x88\x01\x01\x42\x0e\n\x0c_attestationB\x14\n\x12_signerCertificate\"\xe5\x01\n 
ChildWorkflowInstanceFailedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12+\n\x0e\x66\x61ilureDetails\x18\x02 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x35\n\x0b\x61ttestation\x18\x03 \x01(\x0b\x32\x1b.ChildCompletionAttestationH\x00\x88\x01\x01\x12\x1e\n\x11signerCertificate\x18\x04 \x01(\x0cH\x01\x88\x01\x01\x42\x0e\n\x0c_attestationB\x14\n\x12_signerCertificate\":\n$DetachedWorkflowInstanceCreatedEvent\x12\x12\n\ninstanceId\x18\x01 \x01(\t\"\x18\n\x16TimerOriginCreateTimer\"(\n\x18TimerOriginExternalEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\"3\n\x18TimerOriginActivityRetry\x12\x17\n\x0ftaskExecutionId\x18\x01 \x01(\t\"3\n\x1dTimerOriginChildWorkflowRetry\x12\x12\n\ninstanceId\x18\x01 \x01(\t\"\x97\x03\n\x11TimerCreatedEvent\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x11\n\x04name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12>\n\x17rerunParentInstanceInfo\x18\x03 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x02\x88\x01\x01\x12.\n\x0b\x63reateTimer\x18\x04 \x01(\x0b\x32\x17.TimerOriginCreateTimerH\x00\x12\x32\n\rexternalEvent\x18\x05 \x01(\x0b\x32\x19.TimerOriginExternalEventH\x00\x12\x32\n\ractivityRetry\x18\x06 \x01(\x0b\x32\x19.TimerOriginActivityRetryH\x00\x12<\n\x12\x63hildWorkflowRetry\x18\x07 \x01(\x0b\x32\x1e.TimerOriginChildWorkflowRetryH\x00\x42\x08\n\x06originB\x07\n\x05_nameB\x1a\n\x18_rerunParentInstanceInfo\"N\n\x0fTimerFiredEvent\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07timerId\x18\x02 \x01(\x05\"J\n\x14WorkflowStartedEvent\x12&\n\x07version\x18\x01 \x01(\x0b\x32\x10.WorkflowVersionH\x00\x88\x01\x01\x42\n\n\x08_version\"\x18\n\x16WorkflowCompletedEvent\"_\n\x0e\x45ventSentEvent\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"M\n\x10\x45ventRaisedEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05input\x18\x02 
\x01(\x0b\x32\x1c.google.protobuf.StringValue\"A\n\x12\x43ontinueAsNewEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"F\n\x17\x45xecutionSuspendedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"D\n\x15\x45xecutionResumedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"a\n\x15\x45xecutionStalledEvent\x12\x1e\n\x06reason\x18\x01 \x01(\x0e\x32\x0e.StalledReason\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_description\"\xfa\t\n\x0cHistoryEvent\x12\x0f\n\x07\x65ventId\x18\x01 \x01(\x05\x12-\n\ttimestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x10\x65xecutionStarted\x18\x03 \x01(\x0b\x32\x16.ExecutionStartedEventH\x00\x12\x36\n\x12\x65xecutionCompleted\x18\x04 \x01(\x0b\x32\x18.ExecutionCompletedEventH\x00\x12\x38\n\x13\x65xecutionTerminated\x18\x05 \x01(\x0b\x32\x19.ExecutionTerminatedEventH\x00\x12,\n\rtaskScheduled\x18\x06 \x01(\x0b\x32\x13.TaskScheduledEventH\x00\x12,\n\rtaskCompleted\x18\x07 \x01(\x0b\x32\x13.TaskCompletedEventH\x00\x12&\n\ntaskFailed\x18\x08 \x01(\x0b\x32\x10.TaskFailedEventH\x00\x12J\n\x1c\x63hildWorkflowInstanceCreated\x18\t \x01(\x0b\x32\".ChildWorkflowInstanceCreatedEventH\x00\x12N\n\x1e\x63hildWorkflowInstanceCompleted\x18\n \x01(\x0b\x32$.ChildWorkflowInstanceCompletedEventH\x00\x12H\n\x1b\x63hildWorkflowInstanceFailed\x18\x0b \x01(\x0b\x32!.ChildWorkflowInstanceFailedEventH\x00\x12*\n\x0ctimerCreated\x18\x0c \x01(\x0b\x32\x12.TimerCreatedEventH\x00\x12&\n\ntimerFired\x18\r \x01(\x0b\x32\x10.TimerFiredEventH\x00\x12\x30\n\x0fworkflowStarted\x18\x0e \x01(\x0b\x32\x15.WorkflowStartedEventH\x00\x12\x34\n\x11workflowCompleted\x18\x0f \x01(\x0b\x32\x17.WorkflowCompletedEventH\x00\x12$\n\teventSent\x18\x10 \x01(\x0b\x32\x0f.EventSentEventH\x00\x12(\n\x0b\x65ventRaised\x18\x11 \x01(\x0b\x32\x11.EventRaisedEventH\x00\x12,\n\rcontinueAsNew\x18\x14 
\x01(\x0b\x32\x13.ContinueAsNewEventH\x00\x12\x36\n\x12\x65xecutionSuspended\x18\x15 \x01(\x0b\x32\x18.ExecutionSuspendedEventH\x00\x12\x32\n\x10\x65xecutionResumed\x18\x16 \x01(\x0b\x32\x16.ExecutionResumedEventH\x00\x12\x32\n\x10\x65xecutionStalled\x18\x1f \x01(\x0b\x32\x16.ExecutionStalledEventH\x00\x12P\n\x1f\x64\x65tachedWorkflowInstanceCreated\x18 \x01(\x0b\x32%.DetachedWorkflowInstanceCreatedEventH\x00\x12 \n\x06router\x18\x1e \x01(\x0b\x32\x0b.TaskRouterH\x01\x88\x01\x01\x42\x0b\n\teventTypeB\t\n\x07_routerJ\x04\x08\x12\x10\x13J\x04\x08\x13\x10\x14J\x04\x08\x17\x10\x18J\x04\x08\x18\x10\x19J\x04\x08\x19\x10\x1aJ\x04\x08\x1a\x10\x1bJ\x04\x08\x1b\x10\x1cJ\x04\x08\x1c\x10\x1dJ\x04\x08\x1d\x10\x1e\"\x96\x01\n\x16PropagatedHistoryChunk\x12\x11\n\trawEvents\x18\x01 \x03(\x0c\x12\r\n\x05\x61ppId\x18\x02 \x01(\t\x12\x12\n\ninstanceId\x18\x03 \x01(\t\x12\x14\n\x0cworkflowName\x18\x04 \x01(\t\x12\x15\n\rrawSignatures\x18\x05 \x03(\x0c\x12\x19\n\x11signingCertChains\x18\x06 \x03(\x0c\"e\n\x11PropagatedHistory\x12\'\n\x05scope\x18\x01 \x01(\x0e\x32\x18.HistoryPropagationScope\x12\'\n\x06\x63hunks\x18\x02 \x03(\x0b\x32\x17.PropagatedHistoryChunkBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -37,54 +38,60 @@ _globals['DESCRIPTOR']._serialized_options = b'\n+io.dapr.durabletask.implementation.protobufZ\013/api/protos\252\002\031Dapr.DurableTask.Protobuf' _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._loaded_options = None _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._serialized_options = b'8\001' - _globals['_EXECUTIONSTARTEDEVENT']._serialized_start=111 - _globals['_EXECUTIONSTARTEDEVENT']._serialized_end=581 - _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._serialized_start=538 - _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._serialized_end=581 - _globals['_EXECUTIONCOMPLETEDEVENT']._serialized_start=584 - 
_globals['_EXECUTIONCOMPLETEDEVENT']._serialized_end=746 - _globals['_EXECUTIONTERMINATEDEVENT']._serialized_start=748 - _globals['_EXECUTIONTERMINATEDEVENT']._serialized_end=836 - _globals['_TASKSCHEDULEDEVENT']._serialized_start=839 - _globals['_TASKSCHEDULEDEVENT']._serialized_end=1125 - _globals['_TASKCOMPLETEDEVENT']._serialized_start=1127 - _globals['_TASKCOMPLETEDEVENT']._serialized_end=1243 - _globals['_TASKFAILEDEVENT']._serialized_start=1245 - _globals['_TASKFAILEDEVENT']._serialized_end=1357 - _globals['_CHILDWORKFLOWINSTANCECREATEDEVENT']._serialized_start=1360 - _globals['_CHILDWORKFLOWINSTANCECREATEDEVENT']._serialized_end=1656 - _globals['_CHILDWORKFLOWINSTANCECOMPLETEDEVENT']._serialized_start=1658 - _globals['_CHILDWORKFLOWINSTANCECOMPLETEDEVENT']._serialized_end=1766 - _globals['_CHILDWORKFLOWINSTANCEFAILEDEVENT']._serialized_start=1768 - _globals['_CHILDWORKFLOWINSTANCEFAILEDEVENT']._serialized_end=1872 - _globals['_TIMERORIGINCREATETIMER']._serialized_start=1874 - _globals['_TIMERORIGINCREATETIMER']._serialized_end=1898 - _globals['_TIMERORIGINEXTERNALEVENT']._serialized_start=1900 - _globals['_TIMERORIGINEXTERNALEVENT']._serialized_end=1940 - _globals['_TIMERORIGINACTIVITYRETRY']._serialized_start=1942 - _globals['_TIMERORIGINACTIVITYRETRY']._serialized_end=1993 - _globals['_TIMERORIGINCHILDWORKFLOWRETRY']._serialized_start=1995 - _globals['_TIMERORIGINCHILDWORKFLOWRETRY']._serialized_end=2046 - _globals['_TIMERCREATEDEVENT']._serialized_start=2049 - _globals['_TIMERCREATEDEVENT']._serialized_end=2456 - _globals['_TIMERFIREDEVENT']._serialized_start=2458 - _globals['_TIMERFIREDEVENT']._serialized_end=2536 - _globals['_WORKFLOWSTARTEDEVENT']._serialized_start=2538 - _globals['_WORKFLOWSTARTEDEVENT']._serialized_end=2612 - _globals['_WORKFLOWCOMPLETEDEVENT']._serialized_start=2614 - _globals['_WORKFLOWCOMPLETEDEVENT']._serialized_end=2638 - _globals['_EVENTSENTEVENT']._serialized_start=2640 - _globals['_EVENTSENTEVENT']._serialized_end=2735 - 
_globals['_EVENTRAISEDEVENT']._serialized_start=2737 - _globals['_EVENTRAISEDEVENT']._serialized_end=2814 - _globals['_CONTINUEASNEWEVENT']._serialized_start=2816 - _globals['_CONTINUEASNEWEVENT']._serialized_end=2881 - _globals['_EXECUTIONSUSPENDEDEVENT']._serialized_start=2883 - _globals['_EXECUTIONSUSPENDEDEVENT']._serialized_end=2953 - _globals['_EXECUTIONRESUMEDEVENT']._serialized_start=2955 - _globals['_EXECUTIONRESUMEDEVENT']._serialized_end=3023 - _globals['_EXECUTIONSTALLEDEVENT']._serialized_start=3025 - _globals['_EXECUTIONSTALLEDEVENT']._serialized_end=3122 - _globals['_HISTORYEVENT']._serialized_start=3125 - _globals['_HISTORYEVENT']._serialized_end=4317 + _globals['_EXECUTIONSTARTEDEVENT']._serialized_start=130 + _globals['_EXECUTIONSTARTEDEVENT']._serialized_end=600 + _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._serialized_start=557 + _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._serialized_end=600 + _globals['_EXECUTIONCOMPLETEDEVENT']._serialized_start=603 + _globals['_EXECUTIONCOMPLETEDEVENT']._serialized_end=765 + _globals['_EXECUTIONTERMINATEDEVENT']._serialized_start=767 + _globals['_EXECUTIONTERMINATEDEVENT']._serialized_end=855 + _globals['_TASKSCHEDULEDEVENT']._serialized_start=858 + _globals['_TASKSCHEDULEDEVENT']._serialized_end=1236 + _globals['_TASKCOMPLETEDEVENT']._serialized_start=1239 + _globals['_TASKCOMPLETEDEVENT']._serialized_end=1483 + _globals['_TASKFAILEDEVENT']._serialized_start=1486 + _globals['_TASKFAILEDEVENT']._serialized_end=1726 + _globals['_CHILDWORKFLOWINSTANCECREATEDEVENT']._serialized_start=1729 + _globals['_CHILDWORKFLOWINSTANCECREATEDEVENT']._serialized_end=2117 + _globals['_CHILDWORKFLOWINSTANCECOMPLETEDEVENT']._serialized_start=2120 + _globals['_CHILDWORKFLOWINSTANCECOMPLETEDEVENT']._serialized_end=2353 + _globals['_CHILDWORKFLOWINSTANCEFAILEDEVENT']._serialized_start=2356 + _globals['_CHILDWORKFLOWINSTANCEFAILEDEVENT']._serialized_end=2585 + 
_globals['_DETACHEDWORKFLOWINSTANCECREATEDEVENT']._serialized_start=2587 + _globals['_DETACHEDWORKFLOWINSTANCECREATEDEVENT']._serialized_end=2645 + _globals['_TIMERORIGINCREATETIMER']._serialized_start=2647 + _globals['_TIMERORIGINCREATETIMER']._serialized_end=2671 + _globals['_TIMERORIGINEXTERNALEVENT']._serialized_start=2673 + _globals['_TIMERORIGINEXTERNALEVENT']._serialized_end=2713 + _globals['_TIMERORIGINACTIVITYRETRY']._serialized_start=2715 + _globals['_TIMERORIGINACTIVITYRETRY']._serialized_end=2766 + _globals['_TIMERORIGINCHILDWORKFLOWRETRY']._serialized_start=2768 + _globals['_TIMERORIGINCHILDWORKFLOWRETRY']._serialized_end=2819 + _globals['_TIMERCREATEDEVENT']._serialized_start=2822 + _globals['_TIMERCREATEDEVENT']._serialized_end=3229 + _globals['_TIMERFIREDEVENT']._serialized_start=3231 + _globals['_TIMERFIREDEVENT']._serialized_end=3309 + _globals['_WORKFLOWSTARTEDEVENT']._serialized_start=3311 + _globals['_WORKFLOWSTARTEDEVENT']._serialized_end=3385 + _globals['_WORKFLOWCOMPLETEDEVENT']._serialized_start=3387 + _globals['_WORKFLOWCOMPLETEDEVENT']._serialized_end=3411 + _globals['_EVENTSENTEVENT']._serialized_start=3413 + _globals['_EVENTSENTEVENT']._serialized_end=3508 + _globals['_EVENTRAISEDEVENT']._serialized_start=3510 + _globals['_EVENTRAISEDEVENT']._serialized_end=3587 + _globals['_CONTINUEASNEWEVENT']._serialized_start=3589 + _globals['_CONTINUEASNEWEVENT']._serialized_end=3654 + _globals['_EXECUTIONSUSPENDEDEVENT']._serialized_start=3656 + _globals['_EXECUTIONSUSPENDEDEVENT']._serialized_end=3726 + _globals['_EXECUTIONRESUMEDEVENT']._serialized_start=3728 + _globals['_EXECUTIONRESUMEDEVENT']._serialized_end=3796 + _globals['_EXECUTIONSTALLEDEVENT']._serialized_start=3798 + _globals['_EXECUTIONSTALLEDEVENT']._serialized_end=3895 + _globals['_HISTORYEVENT']._serialized_start=3898 + _globals['_HISTORYEVENT']._serialized_end=5172 + _globals['_PROPAGATEDHISTORYCHUNK']._serialized_start=5175 + 
_globals['_PROPAGATEDHISTORYCHUNK']._serialized_end=5325 + _globals['_PROPAGATEDHISTORY']._serialized_start=5327 + _globals['_PROPAGATEDHISTORY']._serialized_end=5428 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.pyi index 66ba6ba6..c2676d8a 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.pyi +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.pyi @@ -11,6 +11,7 @@ from google.protobuf import message as _message from google.protobuf import timestamp_pb2 as _timestamp_pb2 from google.protobuf import wrappers_pb2 as _wrappers_pb2 from google.protobuf.internal import containers as _containers +from dapr.ext.workflow._durabletask.internal import attestation_pb2 as _attestation_pb2 import builtins as _builtins from dapr.ext.workflow._durabletask.internal import orchestration_pb2 as _orchestration_pb2 import sys @@ -148,8 +149,14 @@ class TaskScheduledEvent(_message.Message): PARENTTRACECONTEXT_FIELD_NUMBER: _builtins.int TASKEXECUTIONID_FIELD_NUMBER: _builtins.int RERUNPARENTINSTANCEINFO_FIELD_NUMBER: _builtins.int + HISTORYPROPAGATIONSCOPE_FIELD_NUMBER: _builtins.int name: _builtins.str taskExecutionId: _builtins.str + historyPropagationScope: _orchestration_pb2.HistoryPropagationScope.ValueType + """History propagation scope used when this task was originally scheduled. + Persisted on the event so rerun can re-issue the task with the same + scope after the action has been discarded. + """ @_builtins.property def version(self) -> _wrappers_pb2.StringValue: ... 
@_builtins.property @@ -171,13 +178,19 @@ class TaskScheduledEvent(_message.Message): parentTraceContext: _orchestration_pb2.TraceContext | None = ..., taskExecutionId: _builtins.str = ..., rerunParentInstanceInfo: _orchestration_pb2.RerunParentInstanceInfo | None = ..., + historyPropagationScope: _orchestration_pb2.HistoryPropagationScope.ValueType | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "input", b"input", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "version", b"version"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "historyPropagationScope", b"historyPropagationScope", "input", b"input", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "input", b"input", "name", b"name", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "taskExecutionId", b"taskExecutionId", "version", b"version"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "historyPropagationScope", b"historyPropagationScope", "input", b"input", "name", b"name", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "taskExecutionId", b"taskExecutionId", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
+ _WhichOneofReturnType__historyPropagationScope: _TypeAlias = _typing.Literal["historyPropagationScope"] # noqa: Y015 + _WhichOneofArgType__historyPropagationScope: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope"] # noqa: Y015 _WhichOneofReturnType__rerunParentInstanceInfo: _TypeAlias = _typing.Literal["rerunParentInstanceInfo"] # noqa: Y015 _WhichOneofArgType__rerunParentInstanceInfo: _TypeAlias = _typing.Literal["_rerunParentInstanceInfo", b"_rerunParentInstanceInfo"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__historyPropagationScope) -> _WhichOneofReturnType__historyPropagationScope | None: ... + @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__rerunParentInstanceInfo) -> _WhichOneofReturnType__rerunParentInstanceInfo | None: ... Global___TaskScheduledEvent: _TypeAlias = TaskScheduledEvent # noqa: Y015 @@ -189,21 +202,50 @@ class TaskCompletedEvent(_message.Message): TASKSCHEDULEDID_FIELD_NUMBER: _builtins.int RESULT_FIELD_NUMBER: _builtins.int TASKEXECUTIONID_FIELD_NUMBER: _builtins.int + ATTESTATION_FIELD_NUMBER: _builtins.int + SIGNERCERTIFICATE_FIELD_NUMBER: _builtins.int taskScheduledId: _builtins.int taskExecutionId: _builtins.str + signerCertificate: _builtins.bytes + """Companion: DER-encoded X.509 certificate chain of the executor's + signing identity (leaf first, intermediates concatenated; same + format as SigningCertificate.certificate in backend_service.proto). + Wire-only; stripped by the receiver before the event is written to + history-NNNNNN. The certificate lives once in ext-sigcert-NNNNNN, + referenced by attestation payload's signerCertDigest. + """ @_builtins.property def result(self) -> _wrappers_pb2.StringValue: ... + @_builtins.property + def attestation(self) -> _attestation_pb2.ActivityCompletionAttestation: + """Attestation signed by the activity executor's SPIFFE identity. 
+ Present when the activity was executed under a signing-enabled + configuration. Verified on inbox ingestion against the companion + signerCertificate and preserved in stored history for future audit + and forwarding via provenance bundles. + """ + def __init__( self, *, taskScheduledId: _builtins.int = ..., result: _wrappers_pb2.StringValue | None = ..., taskExecutionId: _builtins.str = ..., + attestation: _attestation_pb2.ActivityCompletionAttestation | None = ..., + signerCertificate: _builtins.bytes | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["result", b"result"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_attestation", b"_attestation", "_signerCertificate", b"_signerCertificate", "attestation", b"attestation", "result", b"result", "signerCertificate", b"signerCertificate"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["result", b"result", "taskExecutionId", b"taskExecutionId", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_attestation", b"_attestation", "_signerCertificate", b"_signerCertificate", "attestation", b"attestation", "result", b"result", "signerCertificate", b"signerCertificate", "taskExecutionId", b"taskExecutionId", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
+ _WhichOneofReturnType__attestation: _TypeAlias = _typing.Literal["attestation"] # noqa: Y015 + _WhichOneofArgType__attestation: _TypeAlias = _typing.Literal["_attestation", b"_attestation"] # noqa: Y015 + _WhichOneofReturnType__signerCertificate: _TypeAlias = _typing.Literal["signerCertificate"] # noqa: Y015 + _WhichOneofArgType__signerCertificate: _TypeAlias = _typing.Literal["_signerCertificate", b"_signerCertificate"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__attestation) -> _WhichOneofReturnType__attestation | None: ... + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__signerCertificate) -> _WhichOneofReturnType__signerCertificate | None: ... Global___TaskCompletedEvent: _TypeAlias = TaskCompletedEvent # noqa: Y015 @@ -214,21 +256,39 @@ class TaskFailedEvent(_message.Message): TASKSCHEDULEDID_FIELD_NUMBER: _builtins.int FAILUREDETAILS_FIELD_NUMBER: _builtins.int TASKEXECUTIONID_FIELD_NUMBER: _builtins.int + ATTESTATION_FIELD_NUMBER: _builtins.int + SIGNERCERTIFICATE_FIELD_NUMBER: _builtins.int taskScheduledId: _builtins.int taskExecutionId: _builtins.str + signerCertificate: _builtins.bytes + """Wire-only companion; see TaskCompletedEvent.signerCertificate.""" @_builtins.property def failureDetails(self) -> _orchestration_pb2.TaskFailureDetails: ... + @_builtins.property + def attestation(self) -> _attestation_pb2.ActivityCompletionAttestation: + """See TaskCompletedEvent.attestation.""" + def __init__( self, *, taskScheduledId: _builtins.int = ..., failureDetails: _orchestration_pb2.TaskFailureDetails | None = ..., taskExecutionId: _builtins.str = ..., + attestation: _attestation_pb2.ActivityCompletionAttestation | None = ..., + signerCertificate: _builtins.bytes | None = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["failureDetails", b"failureDetails"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_attestation", b"_attestation", "_signerCertificate", b"_signerCertificate", "attestation", b"attestation", "failureDetails", b"failureDetails", "signerCertificate", b"signerCertificate"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["failureDetails", b"failureDetails", "taskExecutionId", b"taskExecutionId", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_attestation", b"_attestation", "_signerCertificate", b"_signerCertificate", "attestation", b"attestation", "failureDetails", b"failureDetails", "signerCertificate", b"signerCertificate", "taskExecutionId", b"taskExecutionId", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__attestation: _TypeAlias = _typing.Literal["attestation"] # noqa: Y015 + _WhichOneofArgType__attestation: _TypeAlias = _typing.Literal["_attestation", b"_attestation"] # noqa: Y015 + _WhichOneofReturnType__signerCertificate: _TypeAlias = _typing.Literal["signerCertificate"] # noqa: Y015 + _WhichOneofArgType__signerCertificate: _TypeAlias = _typing.Literal["_signerCertificate", b"_signerCertificate"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__attestation) -> _WhichOneofReturnType__attestation | None: ... + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__signerCertificate) -> _WhichOneofReturnType__signerCertificate | None: ... 
Global___TaskFailedEvent: _TypeAlias = TaskFailedEvent # noqa: Y015 @@ -242,8 +302,14 @@ class ChildWorkflowInstanceCreatedEvent(_message.Message): INPUT_FIELD_NUMBER: _builtins.int PARENTTRACECONTEXT_FIELD_NUMBER: _builtins.int RERUNPARENTINSTANCEINFO_FIELD_NUMBER: _builtins.int + HISTORYPROPAGATIONSCOPE_FIELD_NUMBER: _builtins.int instanceId: _builtins.str name: _builtins.str + historyPropagationScope: _orchestration_pb2.HistoryPropagationScope.ValueType + """History propagation scope used when this child workflow was originally + scheduled. Persisted on the event so rerun can re-issue the child with + the same scope after the action has been discarded. + """ @_builtins.property def version(self) -> _wrappers_pb2.StringValue: ... @_builtins.property @@ -265,13 +331,19 @@ class ChildWorkflowInstanceCreatedEvent(_message.Message): input: _wrappers_pb2.StringValue | None = ..., parentTraceContext: _orchestration_pb2.TraceContext | None = ..., rerunParentInstanceInfo: _orchestration_pb2.RerunParentInstanceInfo | None = ..., + historyPropagationScope: _orchestration_pb2.HistoryPropagationScope.ValueType | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "input", b"input", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "version", b"version"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "historyPropagationScope", b"historyPropagationScope", "input", b"input", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
- _ClearFieldArgType: _TypeAlias = _typing.Literal["_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "input", b"input", "instanceId", b"instanceId", "name", b"name", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "version", b"version"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "historyPropagationScope", b"historyPropagationScope", "input", b"input", "instanceId", b"instanceId", "name", b"name", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__historyPropagationScope: _TypeAlias = _typing.Literal["historyPropagationScope"] # noqa: Y015 + _WhichOneofArgType__historyPropagationScope: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope"] # noqa: Y015 _WhichOneofReturnType__rerunParentInstanceInfo: _TypeAlias = _typing.Literal["rerunParentInstanceInfo"] # noqa: Y015 _WhichOneofArgType__rerunParentInstanceInfo: _TypeAlias = _typing.Literal["_rerunParentInstanceInfo", b"_rerunParentInstanceInfo"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__historyPropagationScope) -> _WhichOneofReturnType__historyPropagationScope | None: ... + @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__rerunParentInstanceInfo) -> _WhichOneofReturnType__rerunParentInstanceInfo | None: ... 
Global___ChildWorkflowInstanceCreatedEvent: _TypeAlias = ChildWorkflowInstanceCreatedEvent # noqa: Y015 @@ -282,19 +354,48 @@ class ChildWorkflowInstanceCompletedEvent(_message.Message): TASKSCHEDULEDID_FIELD_NUMBER: _builtins.int RESULT_FIELD_NUMBER: _builtins.int + ATTESTATION_FIELD_NUMBER: _builtins.int + SIGNERCERTIFICATE_FIELD_NUMBER: _builtins.int taskScheduledId: _builtins.int + signerCertificate: _builtins.bytes + """Companion: DER-encoded X.509 certificate chain of the child's signing + identity (leaf first, intermediates concatenated; same format as + SigningCertificate.certificate in backend_service.proto). Wire-only; + stripped by the receiver before the event is written to + history-NNNNNN. The certificate lives once in ext-sigcert-NNNNNN, + referenced by attestation payload's signerCertDigest. + """ @_builtins.property def result(self) -> _wrappers_pb2.StringValue: ... + @_builtins.property + def attestation(self) -> _attestation_pb2.ChildCompletionAttestation: + """Attestation signed by the completing child workflow's SPIFFE + identity. Present when the child was executed under a signing-enabled + configuration. Verified on inbox ingestion against the companion + signerCertificate and preserved in stored history for future audit + and forwarding via provenance bundles. + """ + def __init__( self, *, taskScheduledId: _builtins.int = ..., result: _wrappers_pb2.StringValue | None = ..., + attestation: _attestation_pb2.ChildCompletionAttestation | None = ..., + signerCertificate: _builtins.bytes | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["result", b"result"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_attestation", b"_attestation", "_signerCertificate", b"_signerCertificate", "attestation", b"attestation", "result", b"result", "signerCertificate", b"signerCertificate"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
- _ClearFieldArgType: _TypeAlias = _typing.Literal["result", b"result", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_attestation", b"_attestation", "_signerCertificate", b"_signerCertificate", "attestation", b"attestation", "result", b"result", "signerCertificate", b"signerCertificate", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__attestation: _TypeAlias = _typing.Literal["attestation"] # noqa: Y015 + _WhichOneofArgType__attestation: _TypeAlias = _typing.Literal["_attestation", b"_attestation"] # noqa: Y015 + _WhichOneofReturnType__signerCertificate: _TypeAlias = _typing.Literal["signerCertificate"] # noqa: Y015 + _WhichOneofArgType__signerCertificate: _TypeAlias = _typing.Literal["_signerCertificate", b"_signerCertificate"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__attestation) -> _WhichOneofReturnType__attestation | None: ... + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__signerCertificate) -> _WhichOneofReturnType__signerCertificate | None: ... Global___ChildWorkflowInstanceCompletedEvent: _TypeAlias = ChildWorkflowInstanceCompletedEvent # noqa: Y015 @@ -304,22 +405,66 @@ class ChildWorkflowInstanceFailedEvent(_message.Message): TASKSCHEDULEDID_FIELD_NUMBER: _builtins.int FAILUREDETAILS_FIELD_NUMBER: _builtins.int + ATTESTATION_FIELD_NUMBER: _builtins.int + SIGNERCERTIFICATE_FIELD_NUMBER: _builtins.int taskScheduledId: _builtins.int + signerCertificate: _builtins.bytes + """Wire-only companion; see + ChildWorkflowInstanceCompletedEvent.signerCertificate. + """ @_builtins.property def failureDetails(self) -> _orchestration_pb2.TaskFailureDetails: ... 
+ @_builtins.property + def attestation(self) -> _attestation_pb2.ChildCompletionAttestation: + """See ChildWorkflowInstanceCompletedEvent.attestation.""" + def __init__( self, *, taskScheduledId: _builtins.int = ..., failureDetails: _orchestration_pb2.TaskFailureDetails | None = ..., + attestation: _attestation_pb2.ChildCompletionAttestation | None = ..., + signerCertificate: _builtins.bytes | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["failureDetails", b"failureDetails"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_attestation", b"_attestation", "_signerCertificate", b"_signerCertificate", "attestation", b"attestation", "failureDetails", b"failureDetails", "signerCertificate", b"signerCertificate"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["failureDetails", b"failureDetails", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_attestation", b"_attestation", "_signerCertificate", b"_signerCertificate", "attestation", b"attestation", "failureDetails", b"failureDetails", "signerCertificate", b"signerCertificate", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__attestation: _TypeAlias = _typing.Literal["attestation"] # noqa: Y015 + _WhichOneofArgType__attestation: _TypeAlias = _typing.Literal["_attestation", b"_attestation"] # noqa: Y015 + _WhichOneofReturnType__signerCertificate: _TypeAlias = _typing.Literal["signerCertificate"] # noqa: Y015 + _WhichOneofArgType__signerCertificate: _TypeAlias = _typing.Literal["_signerCertificate", b"_signerCertificate"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__attestation) -> _WhichOneofReturnType__attestation | None: ... 
+ @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__signerCertificate) -> _WhichOneofReturnType__signerCertificate | None: ... Global___ChildWorkflowInstanceFailedEvent: _TypeAlias = ChildWorkflowInstanceFailedEvent # noqa: Y015 +@_typing.final +class DetachedWorkflowInstanceCreatedEvent(_message.Message): + """DetachedWorkflowInstanceCreatedEvent records that a running workflow + created a new, detached workflow instance via CreateDetachedWorkflowAction. + The new workflow has no parent linkage (no completion or failure flows + back), so this event only stores a pointer to the spawned instance — the + inputs themselves are consumed directly from the action when scheduling. + Replay matches on instanceId, so it is the same value the action carried. + """ + + DESCRIPTOR: _descriptor.Descriptor + + INSTANCEID_FIELD_NUMBER: _builtins.int + instanceId: _builtins.str + def __init__( + self, + *, + instanceId: _builtins.str = ..., + ) -> None: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["instanceId", b"instanceId"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + +Global___DetachedWorkflowInstanceCreatedEvent: _TypeAlias = DetachedWorkflowInstanceCreatedEvent # noqa: Y015 + @_typing.final class TimerOriginCreateTimer(_message.Message): """Indicates the timer was created by a createTimer call with no special origin.""" @@ -656,6 +801,7 @@ class HistoryEvent(_message.Message): EXECUTIONSUSPENDED_FIELD_NUMBER: _builtins.int EXECUTIONRESUMED_FIELD_NUMBER: _builtins.int EXECUTIONSTALLED_FIELD_NUMBER: _builtins.int + DETACHEDWORKFLOWINSTANCECREATED_FIELD_NUMBER: _builtins.int ROUTER_FIELD_NUMBER: _builtins.int eventId: _builtins.int @_builtins.property @@ -699,6 +845,8 @@ class HistoryEvent(_message.Message): @_builtins.property def executionStalled(self) -> Global___ExecutionStalledEvent: ... @_builtins.property + def detachedWorkflowInstanceCreated(self) -> Global___DetachedWorkflowInstanceCreatedEvent: ... 
+ @_builtins.property def router(self) -> _orchestration_pb2.TaskRouter: ... def __init__( self, @@ -724,15 +872,16 @@ class HistoryEvent(_message.Message): executionSuspended: Global___ExecutionSuspendedEvent | None = ..., executionResumed: Global___ExecutionResumedEvent | None = ..., executionStalled: Global___ExecutionStalledEvent | None = ..., + detachedWorkflowInstanceCreated: Global___DetachedWorkflowInstanceCreatedEvent | None = ..., router: _orchestration_pb2.TaskRouter | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "childWorkflowInstanceCompleted", b"childWorkflowInstanceCompleted", "childWorkflowInstanceCreated", b"childWorkflowInstanceCreated", "childWorkflowInstanceFailed", b"childWorkflowInstanceFailed", "continueAsNew", b"continueAsNew", "eventRaised", b"eventRaised", "eventSent", b"eventSent", "eventType", b"eventType", "executionCompleted", b"executionCompleted", "executionResumed", b"executionResumed", "executionStalled", b"executionStalled", "executionStarted", b"executionStarted", "executionSuspended", b"executionSuspended", "executionTerminated", b"executionTerminated", "router", b"router", "taskCompleted", b"taskCompleted", "taskFailed", b"taskFailed", "taskScheduled", b"taskScheduled", "timerCreated", b"timerCreated", "timerFired", b"timerFired", "timestamp", b"timestamp", "workflowCompleted", b"workflowCompleted", "workflowStarted", b"workflowStarted"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "childWorkflowInstanceCompleted", b"childWorkflowInstanceCompleted", "childWorkflowInstanceCreated", b"childWorkflowInstanceCreated", "childWorkflowInstanceFailed", b"childWorkflowInstanceFailed", "continueAsNew", b"continueAsNew", "detachedWorkflowInstanceCreated", b"detachedWorkflowInstanceCreated", "eventRaised", b"eventRaised", "eventSent", b"eventSent", "eventType", b"eventType", "executionCompleted", b"executionCompleted", "executionResumed", 
b"executionResumed", "executionStalled", b"executionStalled", "executionStarted", b"executionStarted", "executionSuspended", b"executionSuspended", "executionTerminated", b"executionTerminated", "router", b"router", "taskCompleted", b"taskCompleted", "taskFailed", b"taskFailed", "taskScheduled", b"taskScheduled", "timerCreated", b"timerCreated", "timerFired", b"timerFired", "timestamp", b"timestamp", "workflowCompleted", b"workflowCompleted", "workflowStarted", b"workflowStarted"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "childWorkflowInstanceCompleted", b"childWorkflowInstanceCompleted", "childWorkflowInstanceCreated", b"childWorkflowInstanceCreated", "childWorkflowInstanceFailed", b"childWorkflowInstanceFailed", "continueAsNew", b"continueAsNew", "eventId", b"eventId", "eventRaised", b"eventRaised", "eventSent", b"eventSent", "eventType", b"eventType", "executionCompleted", b"executionCompleted", "executionResumed", b"executionResumed", "executionStalled", b"executionStalled", "executionStarted", b"executionStarted", "executionSuspended", b"executionSuspended", "executionTerminated", b"executionTerminated", "router", b"router", "taskCompleted", b"taskCompleted", "taskFailed", b"taskFailed", "taskScheduled", b"taskScheduled", "timerCreated", b"timerCreated", "timerFired", b"timerFired", "timestamp", b"timestamp", "workflowCompleted", b"workflowCompleted", "workflowStarted", b"workflowStarted"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "childWorkflowInstanceCompleted", b"childWorkflowInstanceCompleted", "childWorkflowInstanceCreated", b"childWorkflowInstanceCreated", "childWorkflowInstanceFailed", b"childWorkflowInstanceFailed", "continueAsNew", b"continueAsNew", "detachedWorkflowInstanceCreated", b"detachedWorkflowInstanceCreated", "eventId", b"eventId", "eventRaised", b"eventRaised", "eventSent", 
b"eventSent", "eventType", b"eventType", "executionCompleted", b"executionCompleted", "executionResumed", b"executionResumed", "executionStalled", b"executionStalled", "executionStarted", b"executionStarted", "executionSuspended", b"executionSuspended", "executionTerminated", b"executionTerminated", "router", b"router", "taskCompleted", b"taskCompleted", "taskFailed", b"taskFailed", "taskScheduled", b"taskScheduled", "timerCreated", b"timerCreated", "timerFired", b"timerFired", "timestamp", b"timestamp", "workflowCompleted", b"workflowCompleted", "workflowStarted", b"workflowStarted"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... _WhichOneofReturnType__router: _TypeAlias = _typing.Literal["router"] # noqa: Y015 _WhichOneofArgType__router: _TypeAlias = _typing.Literal["_router", b"_router"] # noqa: Y015 - _WhichOneofReturnType_eventType: _TypeAlias = _typing.Literal["executionStarted", "executionCompleted", "executionTerminated", "taskScheduled", "taskCompleted", "taskFailed", "childWorkflowInstanceCreated", "childWorkflowInstanceCompleted", "childWorkflowInstanceFailed", "timerCreated", "timerFired", "workflowStarted", "workflowCompleted", "eventSent", "eventRaised", "continueAsNew", "executionSuspended", "executionResumed", "executionStalled"] # noqa: Y015 + _WhichOneofReturnType_eventType: _TypeAlias = _typing.Literal["executionStarted", "executionCompleted", "executionTerminated", "taskScheduled", "taskCompleted", "taskFailed", "childWorkflowInstanceCreated", "childWorkflowInstanceCompleted", "childWorkflowInstanceFailed", "timerCreated", "timerFired", "workflowStarted", "workflowCompleted", "eventSent", "eventRaised", "continueAsNew", "executionSuspended", "executionResumed", "executionStalled", "detachedWorkflowInstanceCreated"] # noqa: Y015 _WhichOneofArgType_eventType: _TypeAlias = _typing.Literal["eventType", b"eventType"] # noqa: Y015 @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__router) -> 
_WhichOneofReturnType__router | None: ... @@ -740,3 +889,98 @@ class HistoryEvent(_message.Message): def WhichOneof(self, oneof_group: _WhichOneofArgType_eventType) -> _WhichOneofReturnType_eventType | None: ... Global___HistoryEvent: _TypeAlias = HistoryEvent # noqa: Y015 + +@_typing.final +class PropagatedHistoryChunk(_message.Message): + """A self-contained range of events produced by a single app, used when + history from multiple workflows is propagated to a downstream workflow + or activity. Each chunk owns the raw event bytes its producer signed; + receivers digest those bytes directly and decode them into typed + HistoryEvents on demand. + """ + + DESCRIPTOR: _descriptor.Descriptor + + RAWEVENTS_FIELD_NUMBER: _builtins.int + APPID_FIELD_NUMBER: _builtins.int + INSTANCEID_FIELD_NUMBER: _builtins.int + WORKFLOWNAME_FIELD_NUMBER: _builtins.int + RAWSIGNATURES_FIELD_NUMBER: _builtins.int + SIGNINGCERTCHAINS_FIELD_NUMBER: _builtins.int + appId: _builtins.str + instanceId: _builtins.str + """The workflow instance ID/name that produced the events in this chunk.""" + workflowName: _builtins.str + @_builtins.property + def rawEvents(self) -> _containers.RepeatedScalarFieldContainer[_builtins.bytes]: + """Raw deterministic bytes of each HistoryEvent in this chunk, in execution + order. The producer marshals each event once and signs over these exact + bytes; receivers digest them directly and never re-marshal, so chunk + verification is independent of protobuf marshaler-version stability + across producer and receiver. This mirrors the approach attestations use + for ioDigest: signed bytes travel verbatim end-to-end. The chunk's + length is len(rawEvents). + """ + + @_builtins.property + def rawSignatures(self) -> _containers.RepeatedScalarFieldContainer[_builtins.bytes]: + """Raw deterministic bytes of each HistorySignature message produced by the + chunk's app at dispatch time, covering rawEvents in order. Receivers + unmarshal these on demand to verify the chain. 
Raw bytes are required + because HistorySignature.previousSignatureDigest commits to the exact + persisted serialization; re-marshaling on the wire would break chain + linkage. See backend_service.proto: HistorySignature. + """ + + @_builtins.property + def signingCertChains(self) -> _containers.RepeatedScalarFieldContainer[_builtins.bytes]: + """X.509 certificate chains of the chunk app's signing identities, + DER-concatenated leaf-first then intermediates (same encoding as + backend_service.proto: SigningCertificate.certificate). Each + HistorySignature in rawSignatures has a certificateIndex that indexes + into this list, scoped to the chunk's producer app. Raw bytes here avoid + a circular import on backend_service.proto's SigningCertificate type. + """ + + def __init__( + self, + *, + rawEvents: _abc.Iterable[_builtins.bytes] | None = ..., + appId: _builtins.str = ..., + instanceId: _builtins.str = ..., + workflowName: _builtins.str = ..., + rawSignatures: _abc.Iterable[_builtins.bytes] | None = ..., + signingCertChains: _abc.Iterable[_builtins.bytes] | None = ..., + ) -> None: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["appId", b"appId", "instanceId", b"instanceId", "rawEvents", b"rawEvents", "rawSignatures", b"rawSignatures", "signingCertChains", b"signingCertChains", "workflowName", b"workflowName"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + +Global___PropagatedHistoryChunk: _TypeAlias = PropagatedHistoryChunk # noqa: Y015 + +@_typing.final +class PropagatedHistory(_message.Message): + DESCRIPTOR: _descriptor.Descriptor + + SCOPE_FIELD_NUMBER: _builtins.int + CHUNKS_FIELD_NUMBER: _builtins.int + scope: _orchestration_pb2.HistoryPropagationScope.ValueType + """The propagation scope that was used to produce this history.""" + @_builtins.property + def chunks(self) -> _containers.RepeatedCompositeFieldContainer[Global___PropagatedHistoryChunk]: + """Per-app history chunks. 
Each chunk owns the raw event bytes its producer + signed (PropagatedHistoryChunk.rawEvents); receivers digest those bytes + directly and decode them into typed HistoryEvents on demand. Chunks are + ordered, non-overlapping, and together describe the full propagated + event sequence. + """ + + def __init__( + self, + *, + scope: _orchestration_pb2.HistoryPropagationScope.ValueType = ..., + chunks: _abc.Iterable[Global___PropagatedHistoryChunk] | None = ..., + ) -> None: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["chunks", b"chunks", "scope", b"scope"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + +Global___PropagatedHistory: _TypeAlias = PropagatedHistory # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.py index f9727296..cfe77c70 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.py @@ -26,7 +26,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13orchestration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"K\n\nTaskRouter\x12\x13\n\x0bsourceAppID\x18\x01 \x01(\t\x12\x18\n\x0btargetAppID\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_targetAppID\">\n\x0fWorkflowVersion\x12\x0f\n\x07patches\x18\x01 \x03(\t\x12\x11\n\x04name\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x07\n\x05_name\"Y\n\x10WorkflowInstance\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x31\n\x0b\x65xecutionId\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xb2\x01\n\x12TaskFailureDetails\x12\x11\n\terrorType\x18\x01 \x01(\t\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x30\n\nstackTrace\x18\x03 
\x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x0cinnerFailure\x18\x04 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x16\n\x0eisNonRetriable\x18\x05 \x01(\x08\"\xd3\x01\n\x12ParentInstanceInfo\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12*\n\x04name\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x10workflowInstance\x18\x04 \x01(\x0b\x32\x11.WorkflowInstance\x12\x12\n\x05\x61ppID\x18\x05 \x01(\tH\x00\x88\x01\x01\x42\x08\n\x06_appID\"-\n\x17RerunParentInstanceInfo\x12\x12\n\ninstanceID\x18\x01 \x01(\t\"i\n\x0cTraceContext\x12\x13\n\x0btraceParent\x18\x01 \x01(\t\x12\x12\n\x06spanID\x18\x02 \x01(\tB\x02\x18\x01\x12\x30\n\ntraceState\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xef\x05\n\rWorkflowState\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x0eworkflowStatus\x18\x04 \x01(\x0e\x32\x14.OrchestrationStatus\x12;\n\x17scheduledStartTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10\x63reatedTimestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x14lastUpdatedTimestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x05input\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x06output\x18\t \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x32\n\x0c\x63ustomStatus\x18\n \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x0b \x01(\x0b\x32\x13.TaskFailureDetails\x12\x31\n\x0b\x65xecutionId\x18\x0c \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x36\n\x12\x63ompletedTimestamp\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x10parentInstanceId\x18\x0e \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12&\n\x04tags\x18\x0f \x03(\x0b\x32\x18.WorkflowState.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01*>\n\rStalledReason\x12\x12\n\x0ePATCH_MISMATCH\x10\x00\x12\x19\n\x15VERSION_NOT_AVAILABLE\x10\x01*\xd7\x02\n\x13OrchestrationStatus\x12 \n\x1cORCHESTRATION_STATUS_RUNNING\x10\x00\x12\"\n\x1eORCHESTRATION_STATUS_COMPLETED\x10\x01\x12)\n%ORCHESTRATION_STATUS_CONTINUED_AS_NEW\x10\x02\x12\x1f\n\x1bORCHESTRATION_STATUS_FAILED\x10\x03\x12!\n\x1dORCHESTRATION_STATUS_CANCELED\x10\x04\x12#\n\x1fORCHESTRATION_STATUS_TERMINATED\x10\x05\x12 \n\x1cORCHESTRATION_STATUS_PENDING\x10\x06\x12\"\n\x1eORCHESTRATION_STATUS_SUSPENDED\x10\x07\x12 \n\x1cORCHESTRATION_STATUS_STALLED\x10\x08\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13orchestration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\x83\x01\n\nTaskRouter\x12\x13\n\x0bsourceAppID\x18\x01 \x01(\t\x12\x18\n\x0btargetAppID\x18\x02 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12targetAppNamespace\x18\x03 \x01(\tH\x01\x88\x01\x01\x42\x0e\n\x0c_targetAppIDB\x15\n\x13_targetAppNamespace\">\n\x0fWorkflowVersion\x12\x0f\n\x07patches\x18\x01 \x03(\t\x12\x11\n\x04name\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x07\n\x05_name\"Y\n\x10WorkflowInstance\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x31\n\x0b\x65xecutionId\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xb2\x01\n\x12TaskFailureDetails\x12\x11\n\terrorType\x18\x01 \x01(\t\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x30\n\nstackTrace\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x0cinnerFailure\x18\x04 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x16\n\x0eisNonRetriable\x18\x05 \x01(\x08\"\xff\x01\n\x12ParentInstanceInfo\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12*\n\x04name\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x10workflowInstance\x18\x04 
\x01(\x0b\x32\x11.WorkflowInstance\x12\x12\n\x05\x61ppID\x18\x05 \x01(\tH\x00\x88\x01\x01\x12\x19\n\x0c\x61ppNamespace\x18\x06 \x01(\tH\x01\x88\x01\x01\x42\x08\n\x06_appIDB\x0f\n\r_appNamespace\"-\n\x17RerunParentInstanceInfo\x12\x12\n\ninstanceID\x18\x01 \x01(\t\"i\n\x0cTraceContext\x12\x13\n\x0btraceParent\x18\x01 \x01(\t\x12\x12\n\x06spanID\x18\x02 \x01(\tB\x02\x18\x01\x12\x30\n\ntraceState\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xef\x05\n\rWorkflowState\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x0eworkflowStatus\x18\x04 \x01(\x0e\x32\x14.OrchestrationStatus\x12;\n\x17scheduledStartTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10\x63reatedTimestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x14lastUpdatedTimestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x05input\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x06output\x18\t \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x32\n\x0c\x63ustomStatus\x18\n \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x0b \x01(\x0b\x32\x13.TaskFailureDetails\x12\x31\n\x0b\x65xecutionId\x18\x0c \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x36\n\x12\x63ompletedTimestamp\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x10parentInstanceId\x18\x0e \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12&\n\x04tags\x18\x0f \x03(\x0b\x32\x18.WorkflowState.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01*Y\n\rStalledReason\x12\x12\n\x0ePATCH_MISMATCH\x10\x00\x12\x19\n\x15VERSION_NOT_AVAILABLE\x10\x01\x12\x19\n\x15PAYLOAD_SIZE_EXCEEDED\x10\x02*\xd7\x02\n\x13OrchestrationStatus\x12 
\n\x1cORCHESTRATION_STATUS_RUNNING\x10\x00\x12\"\n\x1eORCHESTRATION_STATUS_COMPLETED\x10\x01\x12)\n%ORCHESTRATION_STATUS_CONTINUED_AS_NEW\x10\x02\x12\x1f\n\x1bORCHESTRATION_STATUS_FAILED\x10\x03\x12!\n\x1dORCHESTRATION_STATUS_CANCELED\x10\x04\x12#\n\x1fORCHESTRATION_STATUS_TERMINATED\x10\x05\x12 \n\x1cORCHESTRATION_STATUS_PENDING\x10\x06\x12\"\n\x1eORCHESTRATION_STATUS_SUSPENDED\x10\x07\x12 \n\x1cORCHESTRATION_STATUS_STALLED\x10\x08*\x8f\x01\n\x17HistoryPropagationScope\x12\"\n\x1eHISTORY_PROPAGATION_SCOPE_NONE\x10\x00\x12)\n%HISTORY_PROPAGATION_SCOPE_OWN_HISTORY\x10\x01\x12%\n!HISTORY_PROPAGATION_SCOPE_LINEAGE\x10\x02\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -38,26 +38,28 @@ _globals['_TRACECONTEXT'].fields_by_name['spanID']._serialized_options = b'\030\001' _globals['_WORKFLOWSTATE_TAGSENTRY']._loaded_options = None _globals['_WORKFLOWSTATE_TAGSENTRY']._serialized_options = b'8\001' - _globals['_STALLEDREASON']._serialized_start=1623 - _globals['_STALLEDREASON']._serialized_end=1685 - _globals['_ORCHESTRATIONSTATUS']._serialized_start=1688 - _globals['_ORCHESTRATIONSTATUS']._serialized_end=2031 - _globals['_TASKROUTER']._serialized_start=88 - _globals['_TASKROUTER']._serialized_end=163 - _globals['_WORKFLOWVERSION']._serialized_start=165 - _globals['_WORKFLOWVERSION']._serialized_end=227 - _globals['_WORKFLOWINSTANCE']._serialized_start=229 - _globals['_WORKFLOWINSTANCE']._serialized_end=318 - _globals['_TASKFAILUREDETAILS']._serialized_start=321 - _globals['_TASKFAILUREDETAILS']._serialized_end=499 - _globals['_PARENTINSTANCEINFO']._serialized_start=502 - _globals['_PARENTINSTANCEINFO']._serialized_end=713 - _globals['_RERUNPARENTINSTANCEINFO']._serialized_start=715 - _globals['_RERUNPARENTINSTANCEINFO']._serialized_end=760 - _globals['_TRACECONTEXT']._serialized_start=762 - 
_globals['_TRACECONTEXT']._serialized_end=867 - _globals['_WORKFLOWSTATE']._serialized_start=870 - _globals['_WORKFLOWSTATE']._serialized_end=1621 - _globals['_WORKFLOWSTATE_TAGSENTRY']._serialized_start=1578 - _globals['_WORKFLOWSTATE_TAGSENTRY']._serialized_end=1621 + _globals['_STALLEDREASON']._serialized_start=1724 + _globals['_STALLEDREASON']._serialized_end=1813 + _globals['_ORCHESTRATIONSTATUS']._serialized_start=1816 + _globals['_ORCHESTRATIONSTATUS']._serialized_end=2159 + _globals['_HISTORYPROPAGATIONSCOPE']._serialized_start=2162 + _globals['_HISTORYPROPAGATIONSCOPE']._serialized_end=2305 + _globals['_TASKROUTER']._serialized_start=89 + _globals['_TASKROUTER']._serialized_end=220 + _globals['_WORKFLOWVERSION']._serialized_start=222 + _globals['_WORKFLOWVERSION']._serialized_end=284 + _globals['_WORKFLOWINSTANCE']._serialized_start=286 + _globals['_WORKFLOWINSTANCE']._serialized_end=375 + _globals['_TASKFAILUREDETAILS']._serialized_start=378 + _globals['_TASKFAILUREDETAILS']._serialized_end=556 + _globals['_PARENTINSTANCEINFO']._serialized_start=559 + _globals['_PARENTINSTANCEINFO']._serialized_end=814 + _globals['_RERUNPARENTINSTANCEINFO']._serialized_start=816 + _globals['_RERUNPARENTINSTANCEINFO']._serialized_end=861 + _globals['_TRACECONTEXT']._serialized_start=863 + _globals['_TRACECONTEXT']._serialized_end=968 + _globals['_WORKFLOWSTATE']._serialized_start=971 + _globals['_WORKFLOWSTATE']._serialized_end=1722 + _globals['_WORKFLOWSTATE_TAGSENTRY']._serialized_start=1679 + _globals['_WORKFLOWSTATE_TAGSENTRY']._serialized_end=1722 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.pyi index ce5f416c..10b22698 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.pyi +++ 
b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.pyi @@ -36,11 +36,13 @@ class _StalledReasonEnumTypeWrapper(_enum_type_wrapper._EnumTypeWrapper[_Stalled DESCRIPTOR: _descriptor.EnumDescriptor PATCH_MISMATCH: _StalledReason.ValueType # 0 VERSION_NOT_AVAILABLE: _StalledReason.ValueType # 1 + PAYLOAD_SIZE_EXCEEDED: _StalledReason.ValueType # 2 class StalledReason(_StalledReason, metaclass=_StalledReasonEnumTypeWrapper): ... PATCH_MISMATCH: StalledReason.ValueType # 0 VERSION_NOT_AVAILABLE: StalledReason.ValueType # 1 +PAYLOAD_SIZE_EXCEEDED: StalledReason.ValueType # 2 Global___StalledReason: _TypeAlias = StalledReason # noqa: Y015 class _OrchestrationStatus: @@ -72,27 +74,75 @@ ORCHESTRATION_STATUS_SUSPENDED: OrchestrationStatus.ValueType # 7 ORCHESTRATION_STATUS_STALLED: OrchestrationStatus.ValueType # 8 Global___OrchestrationStatus: _TypeAlias = OrchestrationStatus # noqa: Y015 +class _HistoryPropagationScope: + ValueType = _typing.NewType("ValueType", _builtins.int) + V: _TypeAlias = ValueType # noqa: Y015 + +class _HistoryPropagationScopeEnumTypeWrapper(_enum_type_wrapper._EnumTypeWrapper[_HistoryPropagationScope.ValueType], _builtins.type): + DESCRIPTOR: _descriptor.EnumDescriptor + HISTORY_PROPAGATION_SCOPE_NONE: _HistoryPropagationScope.ValueType # 0 + """No propagation. This is the default for an unset/missing field; the + child receives no history from the caller. + """ + HISTORY_PROPAGATION_SCOPE_OWN_HISTORY: _HistoryPropagationScope.ValueType # 1 + """Propagate the caller's own history events only. The child does + not see any ancestral history (trust boundary). + """ + HISTORY_PROPAGATION_SCOPE_LINEAGE: _HistoryPropagationScope.ValueType # 2 + """Propagate the caller's own history events AND the full ancestral + chain. Any propagated history this workflow received from its + parent is forwarded to the child. 
+ """ + +class HistoryPropagationScope(_HistoryPropagationScope, metaclass=_HistoryPropagationScopeEnumTypeWrapper): + """HistoryPropagationScope controls how history is propagated to a child + workflow or activity + """ + +HISTORY_PROPAGATION_SCOPE_NONE: HistoryPropagationScope.ValueType # 0 +"""No propagation. This is the default for an unset/missing field; the +child receives no history from the caller. +""" +HISTORY_PROPAGATION_SCOPE_OWN_HISTORY: HistoryPropagationScope.ValueType # 1 +"""Propagate the caller's own history events only. The child does +not see any ancestral history (trust boundary). +""" +HISTORY_PROPAGATION_SCOPE_LINEAGE: HistoryPropagationScope.ValueType # 2 +"""Propagate the caller's own history events AND the full ancestral +chain. Any propagated history this workflow received from its +parent is forwarded to the child. +""" +Global___HistoryPropagationScope: _TypeAlias = HistoryPropagationScope # noqa: Y015 + @_typing.final class TaskRouter(_message.Message): DESCRIPTOR: _descriptor.Descriptor SOURCEAPPID_FIELD_NUMBER: _builtins.int TARGETAPPID_FIELD_NUMBER: _builtins.int + TARGETAPPNAMESPACE_FIELD_NUMBER: _builtins.int sourceAppID: _builtins.str targetAppID: _builtins.str + targetAppNamespace: _builtins.str def __init__( self, *, sourceAppID: _builtins.str = ..., targetAppID: _builtins.str | None = ..., + targetAppNamespace: _builtins.str | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["_targetAppID", b"_targetAppID", "targetAppID", b"targetAppID"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_targetAppID", b"_targetAppID", "_targetAppNamespace", b"_targetAppNamespace", "targetAppID", b"targetAppID", "targetAppNamespace", b"targetAppNamespace"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
- _ClearFieldArgType: _TypeAlias = _typing.Literal["_targetAppID", b"_targetAppID", "sourceAppID", b"sourceAppID", "targetAppID", b"targetAppID"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_targetAppID", b"_targetAppID", "_targetAppNamespace", b"_targetAppNamespace", "sourceAppID", b"sourceAppID", "targetAppID", b"targetAppID", "targetAppNamespace", b"targetAppNamespace"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... _WhichOneofReturnType__targetAppID: _TypeAlias = _typing.Literal["targetAppID"] # noqa: Y015 _WhichOneofArgType__targetAppID: _TypeAlias = _typing.Literal["_targetAppID", b"_targetAppID"] # noqa: Y015 + _WhichOneofReturnType__targetAppNamespace: _TypeAlias = _typing.Literal["targetAppNamespace"] # noqa: Y015 + _WhichOneofArgType__targetAppNamespace: _TypeAlias = _typing.Literal["_targetAppNamespace", b"_targetAppNamespace"] # noqa: Y015 + @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__targetAppID) -> _WhichOneofReturnType__targetAppID | None: ... + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__targetAppNamespace) -> _WhichOneofReturnType__targetAppNamespace | None: ... Global___TaskRouter: _TypeAlias = TaskRouter # noqa: Y015 @@ -185,8 +235,10 @@ class ParentInstanceInfo(_message.Message): VERSION_FIELD_NUMBER: _builtins.int WORKFLOWINSTANCE_FIELD_NUMBER: _builtins.int APPID_FIELD_NUMBER: _builtins.int + APPNAMESPACE_FIELD_NUMBER: _builtins.int taskScheduledId: _builtins.int appID: _builtins.str + appNamespace: _builtins.str @_builtins.property def name(self) -> _wrappers_pb2.StringValue: ... @_builtins.property @@ -201,14 +253,20 @@ class ParentInstanceInfo(_message.Message): version: _wrappers_pb2.StringValue | None = ..., workflowInstance: Global___WorkflowInstance | None = ..., appID: _builtins.str | None = ..., + appNamespace: _builtins.str | None = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["_appID", b"_appID", "appID", b"appID", "name", b"name", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_appID", b"_appID", "_appNamespace", b"_appNamespace", "appID", b"appID", "appNamespace", b"appNamespace", "name", b"name", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_appID", b"_appID", "appID", b"appID", "name", b"name", "taskScheduledId", b"taskScheduledId", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_appID", b"_appID", "_appNamespace", b"_appNamespace", "appID", b"appID", "appNamespace", b"appNamespace", "name", b"name", "taskScheduledId", b"taskScheduledId", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... _WhichOneofReturnType__appID: _TypeAlias = _typing.Literal["appID"] # noqa: Y015 _WhichOneofArgType__appID: _TypeAlias = _typing.Literal["_appID", b"_appID"] # noqa: Y015 + _WhichOneofReturnType__appNamespace: _TypeAlias = _typing.Literal["appNamespace"] # noqa: Y015 + _WhichOneofArgType__appNamespace: _TypeAlias = _typing.Literal["_appNamespace", b"_appNamespace"] # noqa: Y015 + @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__appID) -> _WhichOneofReturnType__appID | None: ... + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__appNamespace) -> _WhichOneofReturnType__appNamespace | None: ... 
Global___ParentInstanceInfo: _TypeAlias = ParentInstanceInfo # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.py index 718bb76c..0d0b4bd0 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.py @@ -28,7 +28,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1aorchestrator_actions.proto\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xc4\x01\n\x12ScheduleTaskAction\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12 \n\x06router\x18\x04 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x12\x17\n\x0ftaskExecutionId\x18\x05 \x01(\tB\t\n\x07_router\"\xc6\x01\n\x19\x43reateChildWorkflowAction\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12 \n\x06router\x18\x05 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x42\t\n\x07_router\"\xbb\x02\n\x11\x43reateTimerAction\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x11\n\x04name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12.\n\x0b\x63reateTimer\x18\x03 \x01(\x0b\x32\x17.TimerOriginCreateTimerH\x00\x12\x32\n\rexternalEvent\x18\x04 \x01(\x0b\x32\x19.TimerOriginExternalEventH\x00\x12\x32\n\ractivityRetry\x18\x05 \x01(\x0b\x32\x19.TimerOriginActivityRetryH\x00\x12<\n\x12\x63hildWorkflowRetry\x18\x06 
\x01(\x0b\x32\x1e.TimerOriginChildWorkflowRetryH\x00\x42\x08\n\x06originB\x07\n\x05_name\"p\n\x0fSendEventAction\x12#\n\x08instance\x18\x01 \x01(\x0b\x32\x11.WorkflowInstance\x12\x0c\n\x04name\x18\x02 \x01(\t\x12*\n\x04\x64\x61ta\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xaa\x02\n\x16\x43ompleteWorkflowAction\x12,\n\x0eworkflowStatus\x18\x01 \x01(\x0e\x32\x14.OrchestrationStatus\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\nnewVersion\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12&\n\x0f\x63\x61rryoverEvents\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12+\n\x0e\x66\x61ilureDetails\x18\x06 \x01(\x0b\x32\x13.TaskFailureDetails\"l\n\x17TerminateWorkflowAction\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x0f\n\x07recurse\x18\x03 \x01(\x08\"#\n!WorkflowVersionNotAvailableAction\"\xd6\x03\n\x0eWorkflowAction\x12\n\n\x02id\x18\x01 \x01(\x05\x12+\n\x0cscheduleTask\x18\x02 \x01(\x0b\x32\x13.ScheduleTaskActionH\x00\x12\x39\n\x13\x63reateChildWorkflow\x18\x03 \x01(\x0b\x32\x1a.CreateChildWorkflowActionH\x00\x12)\n\x0b\x63reateTimer\x18\x04 \x01(\x0b\x32\x12.CreateTimerActionH\x00\x12%\n\tsendEvent\x18\x05 \x01(\x0b\x32\x10.SendEventActionH\x00\x12\x33\n\x10\x63ompleteWorkflow\x18\x06 \x01(\x0b\x32\x17.CompleteWorkflowActionH\x00\x12\x35\n\x11terminateWorkflow\x18\x07 \x01(\x0b\x32\x18.TerminateWorkflowActionH\x00\x12I\n\x1bworkflowVersionNotAvailable\x18\n \x01(\x0b\x32\".WorkflowVersionNotAvailableActionH\x00\x12 \n\x06router\x18\t \x01(\x0b\x32\x0b.TaskRouterH\x01\x88\x01\x01\x42\x14\n\x12workflowActionTypeB\t\n\x07_routerJ\x04\x08\x08\x10\tBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1aorchestrator_actions.proto\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xa0\x02\n\x12ScheduleTaskAction\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12 \n\x06router\x18\x04 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x12\x17\n\x0ftaskExecutionId\x18\x05 \x01(\t\x12>\n\x17historyPropagationScope\x18\x06 \x01(\x0e\x32\x18.HistoryPropagationScopeH\x01\x88\x01\x01\x42\t\n\x07_routerB\x1a\n\x18_historyPropagationScope\"\xa2\x02\n\x19\x43reateChildWorkflowAction\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12 \n\x06router\x18\x05 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x12>\n\x17historyPropagationScope\x18\x06 \x01(\x0e\x32\x18.HistoryPropagationScopeH\x01\x88\x01\x01\x42\t\n\x07_routerB\x1a\n\x18_historyPropagationScope\"\x85\x04\n\x1c\x43reateDetachedWorkflowAction\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12@\n\x17scheduledStartTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x88\x01\x01\x12\x31\n\x0b\x65xecutionId\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x35\n\x04tags\x18\x07 \x03(\x0b\x32\'.CreateDetachedWorkflowAction.TagsEntry\x12.\n\x12parentTraceContext\x18\x08 \x01(\x0b\x32\r.TraceContextH\x01\x88\x01\x01\x12 \n\x06router\x18\t \x01(\x0b\x32\x0b.TaskRouterH\x02\x88\x01\x01\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\x1a\n\x18_scheduledStartTimestampB\x15\n\x13_parentTraceContextB\t\n\x07_router\"\xbb\x02\n\x11\x43reateTimerAction\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x11\n\x04name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12.\n\x0b\x63reateTimer\x18\x03 \x01(\x0b\x32\x17.TimerOriginCreateTimerH\x00\x12\x32\n\rexternalEvent\x18\x04 \x01(\x0b\x32\x19.TimerOriginExternalEventH\x00\x12\x32\n\ractivityRetry\x18\x05 \x01(\x0b\x32\x19.TimerOriginActivityRetryH\x00\x12<\n\x12\x63hildWorkflowRetry\x18\x06 \x01(\x0b\x32\x1e.TimerOriginChildWorkflowRetryH\x00\x42\x08\n\x06originB\x07\n\x05_name\"p\n\x0fSendEventAction\x12#\n\x08instance\x18\x01 \x01(\x0b\x32\x11.WorkflowInstance\x12\x0c\n\x04name\x18\x02 \x01(\t\x12*\n\x04\x64\x61ta\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xaa\x02\n\x16\x43ompleteWorkflowAction\x12,\n\x0eworkflowStatus\x18\x01 \x01(\x0e\x32\x14.OrchestrationStatus\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\nnewVersion\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12&\n\x0f\x63\x61rryoverEvents\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12+\n\x0e\x66\x61ilureDetails\x18\x06 \x01(\x0b\x32\x13.TaskFailureDetails\"l\n\x17TerminateWorkflowAction\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x0f\n\x07recurse\x18\x03 \x01(\x08\"#\n!WorkflowVersionNotAvailableAction\"\x97\x04\n\x0eWorkflowAction\x12\n\n\x02id\x18\x01 \x01(\x05\x12+\n\x0cscheduleTask\x18\x02 \x01(\x0b\x32\x13.ScheduleTaskActionH\x00\x12\x39\n\x13\x63reateChildWorkflow\x18\x03 \x01(\x0b\x32\x1a.CreateChildWorkflowActionH\x00\x12)\n\x0b\x63reateTimer\x18\x04 \x01(\x0b\x32\x12.CreateTimerActionH\x00\x12%\n\tsendEvent\x18\x05 \x01(\x0b\x32\x10.SendEventActionH\x00\x12\x33\n\x10\x63ompleteWorkflow\x18\x06 
\x01(\x0b\x32\x17.CompleteWorkflowActionH\x00\x12\x35\n\x11terminateWorkflow\x18\x07 \x01(\x0b\x32\x18.TerminateWorkflowActionH\x00\x12I\n\x1bworkflowVersionNotAvailable\x18\n \x01(\x0b\x32\".WorkflowVersionNotAvailableActionH\x00\x12?\n\x16\x63reateDetachedWorkflow\x18\x0b \x01(\x0b\x32\x1d.CreateDetachedWorkflowActionH\x00\x12 \n\x06router\x18\t \x01(\x0b\x32\x0b.TaskRouterH\x01\x88\x01\x01\x42\x14\n\x12workflowActionTypeB\t\n\x07_routerJ\x04\x08\x08\x10\tBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -36,20 +36,26 @@ if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None _globals['DESCRIPTOR']._serialized_options = b'\n+io.dapr.durabletask.implementation.protobufZ\013/api/protos\252\002\031Dapr.DurableTask.Protobuf' + _globals['_CREATEDETACHEDWORKFLOWACTION_TAGSENTRY']._loaded_options = None + _globals['_CREATEDETACHEDWORKFLOWACTION_TAGSENTRY']._serialized_options = b'8\001' _globals['_SCHEDULETASKACTION']._serialized_start=139 - _globals['_SCHEDULETASKACTION']._serialized_end=335 - _globals['_CREATECHILDWORKFLOWACTION']._serialized_start=338 - _globals['_CREATECHILDWORKFLOWACTION']._serialized_end=536 - _globals['_CREATETIMERACTION']._serialized_start=539 - _globals['_CREATETIMERACTION']._serialized_end=854 - _globals['_SENDEVENTACTION']._serialized_start=856 - _globals['_SENDEVENTACTION']._serialized_end=968 - _globals['_COMPLETEWORKFLOWACTION']._serialized_start=971 - _globals['_COMPLETEWORKFLOWACTION']._serialized_end=1269 - _globals['_TERMINATEWORKFLOWACTION']._serialized_start=1271 - _globals['_TERMINATEWORKFLOWACTION']._serialized_end=1379 - _globals['_WORKFLOWVERSIONNOTAVAILABLEACTION']._serialized_start=1381 - _globals['_WORKFLOWVERSIONNOTAVAILABLEACTION']._serialized_end=1416 - _globals['_WORKFLOWACTION']._serialized_start=1419 - 
_globals['_WORKFLOWACTION']._serialized_end=1889 + _globals['_SCHEDULETASKACTION']._serialized_end=427 + _globals['_CREATECHILDWORKFLOWACTION']._serialized_start=430 + _globals['_CREATECHILDWORKFLOWACTION']._serialized_end=720 + _globals['_CREATEDETACHEDWORKFLOWACTION']._serialized_start=723 + _globals['_CREATEDETACHEDWORKFLOWACTION']._serialized_end=1240 + _globals['_CREATEDETACHEDWORKFLOWACTION_TAGSENTRY']._serialized_start=1135 + _globals['_CREATEDETACHEDWORKFLOWACTION_TAGSENTRY']._serialized_end=1178 + _globals['_CREATETIMERACTION']._serialized_start=1243 + _globals['_CREATETIMERACTION']._serialized_end=1558 + _globals['_SENDEVENTACTION']._serialized_start=1560 + _globals['_SENDEVENTACTION']._serialized_end=1672 + _globals['_COMPLETEWORKFLOWACTION']._serialized_start=1675 + _globals['_COMPLETEWORKFLOWACTION']._serialized_end=1973 + _globals['_TERMINATEWORKFLOWACTION']._serialized_start=1975 + _globals['_TERMINATEWORKFLOWACTION']._serialized_end=2083 + _globals['_WORKFLOWVERSIONNOTAVAILABLEACTION']._serialized_start=2085 + _globals['_WORKFLOWVERSIONNOTAVAILABLEACTION']._serialized_end=2120 + _globals['_WORKFLOWACTION']._serialized_start=2123 + _globals['_WORKFLOWACTION']._serialized_end=2658 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.pyi index 8df2644c..1934c960 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.pyi +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.pyi @@ -33,8 +33,11 @@ class ScheduleTaskAction(_message.Message): INPUT_FIELD_NUMBER: _builtins.int ROUTER_FIELD_NUMBER: _builtins.int TASKEXECUTIONID_FIELD_NUMBER: _builtins.int + HISTORYPROPAGATIONSCOPE_FIELD_NUMBER: _builtins.int name: _builtins.str taskExecutionId: _builtins.str + historyPropagationScope: 
_orchestration_pb2.HistoryPropagationScope.ValueType + """History propagation scope. Absent/SCOPE_NONE = no propagation.""" @_builtins.property def version(self) -> _wrappers_pb2.StringValue: ... @_builtins.property @@ -49,13 +52,19 @@ class ScheduleTaskAction(_message.Message): input: _wrappers_pb2.StringValue | None = ..., router: _orchestration_pb2.TaskRouter | None = ..., taskExecutionId: _builtins.str = ..., + historyPropagationScope: _orchestration_pb2.HistoryPropagationScope.ValueType | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "input", b"input", "router", b"router", "version", b"version"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_router", b"_router", "historyPropagationScope", b"historyPropagationScope", "input", b"input", "router", b"router", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "input", b"input", "name", b"name", "router", b"router", "taskExecutionId", b"taskExecutionId", "version", b"version"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_router", b"_router", "historyPropagationScope", b"historyPropagationScope", "input", b"input", "name", b"name", "router", b"router", "taskExecutionId", b"taskExecutionId", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
+ _WhichOneofReturnType__historyPropagationScope: _TypeAlias = _typing.Literal["historyPropagationScope"] # noqa: Y015 + _WhichOneofArgType__historyPropagationScope: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope"] # noqa: Y015 _WhichOneofReturnType__router: _TypeAlias = _typing.Literal["router"] # noqa: Y015 _WhichOneofArgType__router: _TypeAlias = _typing.Literal["_router", b"_router"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__historyPropagationScope) -> _WhichOneofReturnType__historyPropagationScope | None: ... + @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__router) -> _WhichOneofReturnType__router | None: ... Global___ScheduleTaskAction: _TypeAlias = ScheduleTaskAction # noqa: Y015 @@ -69,8 +78,11 @@ class CreateChildWorkflowAction(_message.Message): VERSION_FIELD_NUMBER: _builtins.int INPUT_FIELD_NUMBER: _builtins.int ROUTER_FIELD_NUMBER: _builtins.int + HISTORYPROPAGATIONSCOPE_FIELD_NUMBER: _builtins.int instanceId: _builtins.str name: _builtins.str + historyPropagationScope: _orchestration_pb2.HistoryPropagationScope.ValueType + """History propagation scope. Absent/SCOPE_NONE = no propagation.""" @_builtins.property def version(self) -> _wrappers_pb2.StringValue: ... @_builtins.property @@ -85,17 +97,122 @@ class CreateChildWorkflowAction(_message.Message): version: _wrappers_pb2.StringValue | None = ..., input: _wrappers_pb2.StringValue | None = ..., router: _orchestration_pb2.TaskRouter | None = ..., + historyPropagationScope: _orchestration_pb2.HistoryPropagationScope.ValueType | None = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "input", b"input", "router", b"router", "version", b"version"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_router", b"_router", "historyPropagationScope", b"historyPropagationScope", "input", b"input", "router", b"router", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "input", b"input", "instanceId", b"instanceId", "name", b"name", "router", b"router", "version", b"version"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_router", b"_router", "historyPropagationScope", b"historyPropagationScope", "input", b"input", "instanceId", b"instanceId", "name", b"name", "router", b"router", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__historyPropagationScope: _TypeAlias = _typing.Literal["historyPropagationScope"] # noqa: Y015 + _WhichOneofArgType__historyPropagationScope: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope"] # noqa: Y015 _WhichOneofReturnType__router: _TypeAlias = _typing.Literal["router"] # noqa: Y015 _WhichOneofArgType__router: _TypeAlias = _typing.Literal["_router", b"_router"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__historyPropagationScope) -> _WhichOneofReturnType__historyPropagationScope | None: ... + @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__router) -> _WhichOneofReturnType__router | None: ... 
Global___CreateChildWorkflowAction: _TypeAlias = CreateChildWorkflowAction # noqa: Y015 +@_typing.final +class CreateDetachedWorkflowAction(_message.Message): + """CreateDetachedWorkflowAction creates a new, detached workflow instance from + a running workflow. Mirrors the fields of CreateInstanceRequest (the client + scheduling API) so the runtime has all the information needed to schedule + the new instance directly from this action. The spawned workflow is fully + decoupled from the caller: no parent pointer is recorded on the new + workflow, no completion is awaited, and no failure propagation flows back. + The creation is recorded once in the caller's history as a + DetachedWorkflowInstanceCreatedEvent referencing the new instance ID. + """ + + DESCRIPTOR: _descriptor.Descriptor + + @_typing.final + class TagsEntry(_message.Message): + DESCRIPTOR: _descriptor.Descriptor + + KEY_FIELD_NUMBER: _builtins.int + VALUE_FIELD_NUMBER: _builtins.int + key: _builtins.str + value: _builtins.str + def __init__( + self, + *, + key: _builtins.str = ..., + value: _builtins.str = ..., + ) -> None: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["key", b"key", "value", b"value"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + + INSTANCEID_FIELD_NUMBER: _builtins.int + NAME_FIELD_NUMBER: _builtins.int + VERSION_FIELD_NUMBER: _builtins.int + INPUT_FIELD_NUMBER: _builtins.int + SCHEDULEDSTARTTIMESTAMP_FIELD_NUMBER: _builtins.int + EXECUTIONID_FIELD_NUMBER: _builtins.int + TAGS_FIELD_NUMBER: _builtins.int + PARENTTRACECONTEXT_FIELD_NUMBER: _builtins.int + ROUTER_FIELD_NUMBER: _builtins.int + instanceId: _builtins.str + """instanceId is the ID assigned to the new workflow. It is mandatory: + implementors must set a stable, deterministic ID so that on replay the + call resolves to the same DetachedWorkflowInstanceCreatedEvent in + history. + """ + name: _builtins.str + """name of the workflow to schedule. 
Mandatory.""" + @_builtins.property + def version(self) -> _wrappers_pb2.StringValue: + """The remaining fields mirror the optional inputs of + CreateInstanceRequest. Wrapper types (StringValue) carry presence via + the wrapper; bare message fields are explicitly marked optional. + """ + + @_builtins.property + def input(self) -> _wrappers_pb2.StringValue: ... + @_builtins.property + def scheduledStartTimestamp(self) -> _timestamp_pb2.Timestamp: ... + @_builtins.property + def executionId(self) -> _wrappers_pb2.StringValue: ... + @_builtins.property + def tags(self) -> _containers.ScalarMap[_builtins.str, _builtins.str]: ... + @_builtins.property + def parentTraceContext(self) -> _orchestration_pb2.TraceContext: ... + @_builtins.property + def router(self) -> _orchestration_pb2.TaskRouter: ... + def __init__( + self, + *, + instanceId: _builtins.str = ..., + name: _builtins.str = ..., + version: _wrappers_pb2.StringValue | None = ..., + input: _wrappers_pb2.StringValue | None = ..., + scheduledStartTimestamp: _timestamp_pb2.Timestamp | None = ..., + executionId: _wrappers_pb2.StringValue | None = ..., + tags: _abc.Mapping[_builtins.str, _builtins.str] | None = ..., + parentTraceContext: _orchestration_pb2.TraceContext | None = ..., + router: _orchestration_pb2.TaskRouter | None = ..., + ) -> None: ... + _HasFieldArgType: _TypeAlias = _typing.Literal["_parentTraceContext", b"_parentTraceContext", "_router", b"_router", "_scheduledStartTimestamp", b"_scheduledStartTimestamp", "executionId", b"executionId", "input", b"input", "parentTraceContext", b"parentTraceContext", "router", b"router", "scheduledStartTimestamp", b"scheduledStartTimestamp", "version", b"version"] # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
+ _ClearFieldArgType: _TypeAlias = _typing.Literal["_parentTraceContext", b"_parentTraceContext", "_router", b"_router", "_scheduledStartTimestamp", b"_scheduledStartTimestamp", "executionId", b"executionId", "input", b"input", "instanceId", b"instanceId", "name", b"name", "parentTraceContext", b"parentTraceContext", "router", b"router", "scheduledStartTimestamp", b"scheduledStartTimestamp", "tags", b"tags", "version", b"version"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__parentTraceContext: _TypeAlias = _typing.Literal["parentTraceContext"] # noqa: Y015 + _WhichOneofArgType__parentTraceContext: _TypeAlias = _typing.Literal["_parentTraceContext", b"_parentTraceContext"] # noqa: Y015 + _WhichOneofReturnType__router: _TypeAlias = _typing.Literal["router"] # noqa: Y015 + _WhichOneofArgType__router: _TypeAlias = _typing.Literal["_router", b"_router"] # noqa: Y015 + _WhichOneofReturnType__scheduledStartTimestamp: _TypeAlias = _typing.Literal["scheduledStartTimestamp"] # noqa: Y015 + _WhichOneofArgType__scheduledStartTimestamp: _TypeAlias = _typing.Literal["_scheduledStartTimestamp", b"_scheduledStartTimestamp"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__parentTraceContext) -> _WhichOneofReturnType__parentTraceContext | None: ... + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__router) -> _WhichOneofReturnType__router | None: ... + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__scheduledStartTimestamp) -> _WhichOneofReturnType__scheduledStartTimestamp | None: ... 
+ +Global___CreateDetachedWorkflowAction: _TypeAlias = CreateDetachedWorkflowAction # noqa: Y015 + @_typing.final class CreateTimerAction(_message.Message): DESCRIPTOR: _descriptor.Descriptor @@ -253,6 +370,7 @@ class WorkflowAction(_message.Message): COMPLETEWORKFLOW_FIELD_NUMBER: _builtins.int TERMINATEWORKFLOW_FIELD_NUMBER: _builtins.int WORKFLOWVERSIONNOTAVAILABLE_FIELD_NUMBER: _builtins.int + CREATEDETACHEDWORKFLOW_FIELD_NUMBER: _builtins.int ROUTER_FIELD_NUMBER: _builtins.int id: _builtins.int @_builtins.property @@ -270,6 +388,8 @@ class WorkflowAction(_message.Message): @_builtins.property def workflowVersionNotAvailable(self) -> Global___WorkflowVersionNotAvailableAction: ... @_builtins.property + def createDetachedWorkflow(self) -> Global___CreateDetachedWorkflowAction: ... + @_builtins.property def router(self) -> _orchestration_pb2.TaskRouter: ... def __init__( self, @@ -282,15 +402,16 @@ class WorkflowAction(_message.Message): completeWorkflow: Global___CompleteWorkflowAction | None = ..., terminateWorkflow: Global___TerminateWorkflowAction | None = ..., workflowVersionNotAvailable: Global___WorkflowVersionNotAvailableAction | None = ..., + createDetachedWorkflow: Global___CreateDetachedWorkflowAction | None = ..., router: _orchestration_pb2.TaskRouter | None = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "completeWorkflow", b"completeWorkflow", "createChildWorkflow", b"createChildWorkflow", "createTimer", b"createTimer", "router", b"router", "scheduleTask", b"scheduleTask", "sendEvent", b"sendEvent", "terminateWorkflow", b"terminateWorkflow", "workflowActionType", b"workflowActionType", "workflowVersionNotAvailable", b"workflowVersionNotAvailable"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "completeWorkflow", b"completeWorkflow", "createChildWorkflow", b"createChildWorkflow", "createDetachedWorkflow", b"createDetachedWorkflow", "createTimer", b"createTimer", "router", b"router", "scheduleTask", b"scheduleTask", "sendEvent", b"sendEvent", "terminateWorkflow", b"terminateWorkflow", "workflowActionType", b"workflowActionType", "workflowVersionNotAvailable", b"workflowVersionNotAvailable"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "completeWorkflow", b"completeWorkflow", "createChildWorkflow", b"createChildWorkflow", "createTimer", b"createTimer", "id", b"id", "router", b"router", "scheduleTask", b"scheduleTask", "sendEvent", b"sendEvent", "terminateWorkflow", b"terminateWorkflow", "workflowActionType", b"workflowActionType", "workflowVersionNotAvailable", b"workflowVersionNotAvailable"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "completeWorkflow", b"completeWorkflow", "createChildWorkflow", b"createChildWorkflow", "createDetachedWorkflow", b"createDetachedWorkflow", "createTimer", b"createTimer", "id", b"id", "router", b"router", "scheduleTask", b"scheduleTask", "sendEvent", b"sendEvent", "terminateWorkflow", b"terminateWorkflow", "workflowActionType", b"workflowActionType", "workflowVersionNotAvailable", b"workflowVersionNotAvailable"] # noqa: Y015 def ClearField(self, field_name: 
_ClearFieldArgType) -> None: ... _WhichOneofReturnType__router: _TypeAlias = _typing.Literal["router"] # noqa: Y015 _WhichOneofArgType__router: _TypeAlias = _typing.Literal["_router", b"_router"] # noqa: Y015 - _WhichOneofReturnType_workflowActionType: _TypeAlias = _typing.Literal["scheduleTask", "createChildWorkflow", "createTimer", "sendEvent", "completeWorkflow", "terminateWorkflow", "workflowVersionNotAvailable"] # noqa: Y015 + _WhichOneofReturnType_workflowActionType: _TypeAlias = _typing.Literal["scheduleTask", "createChildWorkflow", "createTimer", "sendEvent", "completeWorkflow", "terminateWorkflow", "workflowVersionNotAvailable", "createDetachedWorkflow"] # noqa: Y015 _WhichOneofArgType_workflowActionType: _TypeAlias = _typing.Literal["workflowActionType", b"workflowActionType"] # noqa: Y015 @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__router) -> _WhichOneofReturnType__router | None: ... diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.py index a3255f81..e9c39e64 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.py @@ -30,7 +30,7 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1aorchestrator_service.proto\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1aorchestrator_actions.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1bgoogle/protobuf/empty.proto\"\xfc\x01\n\x0f\x41\x63tivityRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x10workflowInstance\x18\x04 
\x01(\x0b\x32\x11.WorkflowInstance\x12\x0e\n\x06taskId\x18\x05 \x01(\x05\x12)\n\x12parentTraceContext\x18\x06 \x01(\x0b\x32\r.TraceContext\x12\x17\n\x0ftaskExecutionId\x18\x07 \x01(\t\"\xaa\x01\n\x10\x41\x63tivityResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0e\n\x06taskId\x18\x02 \x01(\x05\x12,\n\x06result\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x04 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x17\n\x0f\x63ompletionToken\x18\x05 \x01(\t\"\xf2\x01\n\x0fWorkflowRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x31\n\x0b\x65xecutionId\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12!\n\npastEvents\x18\x03 \x03(\x0b\x32\r.HistoryEvent\x12 \n\tnewEvents\x18\x04 \x03(\x0b\x32\r.HistoryEvent\x12 \n\x18requiresHistoryStreaming\x18\x06 \x01(\x08\x12 \n\x06router\x18\x07 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x42\t\n\x07_routerJ\x04\x08\x05\x10\x06\"\x82\x02\n\x10WorkflowResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12 \n\x07\x61\x63tions\x18\x02 \x03(\x0b\x32\x0f.WorkflowAction\x12\x32\n\x0c\x63ustomStatus\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x17\n\x0f\x63ompletionToken\x18\x04 \x01(\t\x12\x37\n\x12numEventsProcessed\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12&\n\x07version\x18\x06 \x01(\x0b\x32\x10.WorkflowVersionH\x00\x88\x01\x01\x42\n\n\x08_version\"\xaf\x03\n\x15\x43reateInstanceRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12;\n\x17scheduledStartTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\x0b\x65xecutionId\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x04tags\x18\x08 \x03(\x0b\x32 .CreateInstanceRequest.TagsEntry\x12)\n\x12parentTraceContext\x18\t \x01(\x0b\x32\r.TraceContext\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01J\x04\x08\x06\x10\x07R\x1aorchestrationIdReusePolicy\",\n\x16\x43reateInstanceResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\"E\n\x12GetInstanceRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x1b\n\x13getInputsAndOutputs\x18\x02 \x01(\x08\"L\n\x13GetInstanceResponse\x12\x0e\n\x06\x65xists\x18\x01 \x01(\x08\x12%\n\rworkflowState\x18\x02 \x01(\x0b\x32\x0e.WorkflowState\"b\n\x11RaiseEventRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x14\n\x12RaiseEventResponse\"g\n\x10TerminateRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06output\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x11\n\trecursive\x18\x03 \x01(\x08\"\x13\n\x11TerminateResponse\"R\n\x0eSuspendRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x11\n\x0fSuspendResponse\"Q\n\rResumeRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x10\n\x0eResumeResponse\"\x9e\x01\n\x15PurgeInstancesRequest\x12\x14\n\ninstanceId\x18\x01 \x01(\tH\x00\x12\x33\n\x13purgeInstanceFilter\x18\x02 \x01(\x0b\x32\x14.PurgeInstanceFilterH\x00\x12\x11\n\trecursive\x18\x03 \x01(\x08\x12\x12\n\x05\x66orce\x18\x04 \x01(\x08H\x01\x88\x01\x01\x42\t\n\x07requestB\x08\n\x06_force\"\xaa\x01\n\x13PurgeInstanceFilter\x12\x33\n\x0f\x63reatedTimeFrom\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rcreatedTimeTo\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\rruntimeStatus\x18\x03 \x03(\x0e\x32\x14.OrchestrationStatus\"f\n\x16PurgeInstancesResponse\x12\x1c\n\x14\x64\x65letedInstanceCount\x18\x01 \x01(\x05\x12.\n\nisComplete\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\"-\n\x13GetWorkItemsRequestJ\x04\x08\x01\x10\x02J\x04\x08\x02\x10\x03J\x04\x08\x03\x10\x04J\x04\x08\n\x10\x0b\"\x9a\x01\n\x08WorkItem\x12+\n\x0fworkflowRequest\x18\x01 
\x01(\x0b\x32\x10.WorkflowRequestH\x00\x12+\n\x0f\x61\x63tivityRequest\x18\x02 \x01(\x0b\x32\x10.ActivityRequestH\x00\x12\x17\n\x0f\x63ompletionToken\x18\n \x01(\tB\t\n\x07requestJ\x04\x08\x03\x10\x04J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06\"\x16\n\x14\x43ompleteTaskResponse\"\x85\x02\n\x1dRerunWorkflowFromEventRequest\x12\x18\n\x10sourceInstanceID\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventID\x18\x02 \x01(\r\x12\x1a\n\rnewInstanceID\x18\x03 \x01(\tH\x00\x88\x01\x01\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x16\n\x0eoverwriteInput\x18\x05 \x01(\x08\x12\'\n\x1anewChildWorkflowInstanceID\x18\x06 \x01(\tH\x01\x88\x01\x01\x42\x10\n\x0e_newInstanceIDB\x1d\n\x1b_newChildWorkflowInstanceID\"7\n\x1eRerunWorkflowFromEventResponse\x12\x15\n\rnewInstanceID\x18\x01 \x01(\t\"r\n\x16ListInstanceIDsRequest\x12\x1e\n\x11\x63ontinuationToken\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08pageSize\x18\x02 \x01(\rH\x01\x88\x01\x01\x42\x14\n\x12_continuationTokenB\x0b\n\t_pageSize\"d\n\x17ListInstanceIDsResponse\x12\x13\n\x0binstanceIds\x18\x01 \x03(\t\x12\x1e\n\x11\x63ontinuationToken\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x14\n\x12_continuationToken\"/\n\x19GetInstanceHistoryRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\";\n\x1aGetInstanceHistoryResponse\x12\x1d\n\x06\x65vents\x18\x01 
\x03(\x0b\x32\r.HistoryEvent*^\n\x10WorkerCapability\x12!\n\x1dWORKER_CAPABILITY_UNSPECIFIED\x10\x00\x12\'\n#WORKER_CAPABILITY_HISTORY_STREAMING\x10\x01\x32\xe8\x08\n\x15TaskHubSidecarService\x12\x37\n\x05Hello\x12\x16.google.protobuf.Empty\x1a\x16.google.protobuf.Empty\x12@\n\rStartInstance\x12\x16.CreateInstanceRequest\x1a\x17.CreateInstanceResponse\x12\x38\n\x0bGetInstance\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x41\n\x14WaitForInstanceStart\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x46\n\x19WaitForInstanceCompletion\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x35\n\nRaiseEvent\x12\x12.RaiseEventRequest\x1a\x13.RaiseEventResponse\x12:\n\x11TerminateInstance\x12\x11.TerminateRequest\x1a\x12.TerminateResponse\x12\x34\n\x0fSuspendInstance\x12\x0f.SuspendRequest\x1a\x10.SuspendResponse\x12\x31\n\x0eResumeInstance\x12\x0e.ResumeRequest\x1a\x0f.ResumeResponse\x12\x41\n\x0ePurgeInstances\x12\x16.PurgeInstancesRequest\x1a\x17.PurgeInstancesResponse\x12\x31\n\x0cGetWorkItems\x12\x14.GetWorkItemsRequest\x1a\t.WorkItem0\x01\x12@\n\x14\x43ompleteActivityTask\x12\x11.ActivityResponse\x1a\x15.CompleteTaskResponse\x12I\n\x18\x43ompleteOrchestratorTask\x12\x11.WorkflowResponse\x1a\x15.CompleteTaskResponse\"\x03\x88\x02\x01\x12@\n\x14\x43ompleteWorkflowTask\x12\x11.WorkflowResponse\x1a\x15.CompleteTaskResponse\x12Y\n\x16RerunWorkflowFromEvent\x12\x1e.RerunWorkflowFromEventRequest\x1a\x1f.RerunWorkflowFromEventResponse\x12\x44\n\x0fListInstanceIDs\x12\x17.ListInstanceIDsRequest\x1a\x18.ListInstanceIDsResponse\x12M\n\x12GetInstanceHistory\x12\x1a.GetInstanceHistoryRequest\x1a\x1b.GetInstanceHistoryResponseBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1aorchestrator_service.proto\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1aorchestrator_actions.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1bgoogle/protobuf/empty.proto\"\xc6\x02\n\x0f\x41\x63tivityRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x10workflowInstance\x18\x04 \x01(\x0b\x32\x11.WorkflowInstance\x12\x0e\n\x06taskId\x18\x05 \x01(\x05\x12)\n\x12parentTraceContext\x18\x06 \x01(\x0b\x32\r.TraceContext\x12\x17\n\x0ftaskExecutionId\x18\x07 \x01(\t\x12\x32\n\x11propagatedHistory\x18\x08 \x01(\x0b\x32\x12.PropagatedHistoryH\x00\x88\x01\x01\x42\x14\n\x12_propagatedHistory\"\xaa\x01\n\x10\x41\x63tivityResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0e\n\x06taskId\x18\x02 \x01(\x05\x12,\n\x06result\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x04 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x17\n\x0f\x63ompletionToken\x18\x05 \x01(\t\"\xbc\x02\n\x0fWorkflowRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x31\n\x0b\x65xecutionId\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12!\n\npastEvents\x18\x03 \x03(\x0b\x32\r.HistoryEvent\x12 \n\tnewEvents\x18\x04 \x03(\x0b\x32\r.HistoryEvent\x12 \n\x18requiresHistoryStreaming\x18\x06 \x01(\x08\x12 \n\x06router\x18\x07 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x12\x32\n\x11propagatedHistory\x18\x08 \x01(\x0b\x32\x12.PropagatedHistoryH\x01\x88\x01\x01\x42\t\n\x07_routerB\x14\n\x12_propagatedHistoryJ\x04\x08\x05\x10\x06\"\x82\x02\n\x10WorkflowResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12 \n\x07\x61\x63tions\x18\x02 \x03(\x0b\x32\x0f.WorkflowAction\x12\x32\n\x0c\x63ustomStatus\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x17\n\x0f\x63ompletionToken\x18\x04 \x01(\t\x12\x37\n\x12numEventsProcessed\x18\x05 
\x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12&\n\x07version\x18\x06 \x01(\x0b\x32\x10.WorkflowVersionH\x00\x88\x01\x01\x42\n\n\x08_version\"\xaf\x03\n\x15\x43reateInstanceRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12;\n\x17scheduledStartTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\x0b\x65xecutionId\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x04tags\x18\x08 \x03(\x0b\x32 .CreateInstanceRequest.TagsEntry\x12)\n\x12parentTraceContext\x18\t \x01(\x0b\x32\r.TraceContext\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01J\x04\x08\x06\x10\x07R\x1aorchestrationIdReusePolicy\",\n\x16\x43reateInstanceResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\"E\n\x12GetInstanceRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x1b\n\x13getInputsAndOutputs\x18\x02 \x01(\x08\"L\n\x13GetInstanceResponse\x12\x0e\n\x06\x65xists\x18\x01 \x01(\x08\x12%\n\rworkflowState\x18\x02 \x01(\x0b\x32\x0e.WorkflowState\"b\n\x11RaiseEventRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x14\n\x12RaiseEventResponse\"g\n\x10TerminateRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06output\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x11\n\trecursive\x18\x03 \x01(\x08\"\x13\n\x11TerminateResponse\"R\n\x0eSuspendRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x11\n\x0fSuspendResponse\"Q\n\rResumeRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x10\n\x0eResumeResponse\"\x9e\x01\n\x15PurgeInstancesRequest\x12\x14\n\ninstanceId\x18\x01 
\x01(\tH\x00\x12\x33\n\x13purgeInstanceFilter\x18\x02 \x01(\x0b\x32\x14.PurgeInstanceFilterH\x00\x12\x11\n\trecursive\x18\x03 \x01(\x08\x12\x12\n\x05\x66orce\x18\x04 \x01(\x08H\x01\x88\x01\x01\x42\t\n\x07requestB\x08\n\x06_force\"\xaa\x01\n\x13PurgeInstanceFilter\x12\x33\n\x0f\x63reatedTimeFrom\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rcreatedTimeTo\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\rruntimeStatus\x18\x03 \x03(\x0e\x32\x14.OrchestrationStatus\"f\n\x16PurgeInstancesResponse\x12\x1c\n\x14\x64\x65letedInstanceCount\x18\x01 \x01(\x05\x12.\n\nisComplete\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\"-\n\x13GetWorkItemsRequestJ\x04\x08\x01\x10\x02J\x04\x08\x02\x10\x03J\x04\x08\x03\x10\x04J\x04\x08\n\x10\x0b\"\x9a\x01\n\x08WorkItem\x12+\n\x0fworkflowRequest\x18\x01 \x01(\x0b\x32\x10.WorkflowRequestH\x00\x12+\n\x0f\x61\x63tivityRequest\x18\x02 \x01(\x0b\x32\x10.ActivityRequestH\x00\x12\x17\n\x0f\x63ompletionToken\x18\n \x01(\tB\t\n\x07requestJ\x04\x08\x03\x10\x04J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06\"\x16\n\x14\x43ompleteTaskResponse\"\x85\x02\n\x1dRerunWorkflowFromEventRequest\x12\x18\n\x10sourceInstanceID\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventID\x18\x02 \x01(\r\x12\x1a\n\rnewInstanceID\x18\x03 \x01(\tH\x00\x88\x01\x01\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x16\n\x0eoverwriteInput\x18\x05 \x01(\x08\x12\'\n\x1anewChildWorkflowInstanceID\x18\x06 \x01(\tH\x01\x88\x01\x01\x42\x10\n\x0e_newInstanceIDB\x1d\n\x1b_newChildWorkflowInstanceID\"7\n\x1eRerunWorkflowFromEventResponse\x12\x15\n\rnewInstanceID\x18\x01 \x01(\t\"r\n\x16ListInstanceIDsRequest\x12\x1e\n\x11\x63ontinuationToken\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08pageSize\x18\x02 \x01(\rH\x01\x88\x01\x01\x42\x14\n\x12_continuationTokenB\x0b\n\t_pageSize\"d\n\x17ListInstanceIDsResponse\x12\x13\n\x0binstanceIds\x18\x01 \x03(\t\x12\x1e\n\x11\x63ontinuationToken\x18\x02 
\x01(\tH\x00\x88\x01\x01\x42\x14\n\x12_continuationToken\"/\n\x19GetInstanceHistoryRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\";\n\x1aGetInstanceHistoryResponse\x12\x1d\n\x06\x65vents\x18\x01 \x03(\x0b\x32\r.HistoryEvent*^\n\x10WorkerCapability\x12!\n\x1dWORKER_CAPABILITY_UNSPECIFIED\x10\x00\x12\'\n#WORKER_CAPABILITY_HISTORY_STREAMING\x10\x01\x32\xe8\x08\n\x15TaskHubSidecarService\x12\x37\n\x05Hello\x12\x16.google.protobuf.Empty\x1a\x16.google.protobuf.Empty\x12@\n\rStartInstance\x12\x16.CreateInstanceRequest\x1a\x17.CreateInstanceResponse\x12\x38\n\x0bGetInstance\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x41\n\x14WaitForInstanceStart\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x46\n\x19WaitForInstanceCompletion\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x35\n\nRaiseEvent\x12\x12.RaiseEventRequest\x1a\x13.RaiseEventResponse\x12:\n\x11TerminateInstance\x12\x11.TerminateRequest\x1a\x12.TerminateResponse\x12\x34\n\x0fSuspendInstance\x12\x0f.SuspendRequest\x1a\x10.SuspendResponse\x12\x31\n\x0eResumeInstance\x12\x0e.ResumeRequest\x1a\x0f.ResumeResponse\x12\x41\n\x0ePurgeInstances\x12\x16.PurgeInstancesRequest\x1a\x17.PurgeInstancesResponse\x12\x31\n\x0cGetWorkItems\x12\x14.GetWorkItemsRequest\x1a\t.WorkItem0\x01\x12@\n\x14\x43ompleteActivityTask\x12\x11.ActivityResponse\x1a\x15.CompleteTaskResponse\x12I\n\x18\x43ompleteOrchestratorTask\x12\x11.WorkflowResponse\x1a\x15.CompleteTaskResponse\"\x03\x88\x02\x01\x12@\n\x14\x43ompleteWorkflowTask\x12\x11.WorkflowResponse\x1a\x15.CompleteTaskResponse\x12Y\n\x16RerunWorkflowFromEvent\x12\x1e.RerunWorkflowFromEventRequest\x1a\x1f.RerunWorkflowFromEventResponse\x12\x44\n\x0fListInstanceIDs\x12\x17.ListInstanceIDsRequest\x1a\x18.ListInstanceIDsResponse\x12M\n\x12GetInstanceHistory\x12\x1a.GetInstanceHistoryRequest\x1a\x1b.GetInstanceHistoryResponseBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = 
globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -42,66 +42,66 @@ _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._serialized_options = b'8\001' _globals['_TASKHUBSIDECARSERVICE'].methods_by_name['CompleteOrchestratorTask']._loaded_options = None _globals['_TASKHUBSIDECARSERVICE'].methods_by_name['CompleteOrchestratorTask']._serialized_options = b'\210\002\001' - _globals['_WORKERCAPABILITY']._serialized_start=3525 - _globals['_WORKERCAPABILITY']._serialized_end=3619 + _globals['_WORKERCAPABILITY']._serialized_start=3673 + _globals['_WORKERCAPABILITY']._serialized_end=3767 _globals['_ACTIVITYREQUEST']._serialized_start=196 - _globals['_ACTIVITYREQUEST']._serialized_end=448 - _globals['_ACTIVITYRESPONSE']._serialized_start=451 - _globals['_ACTIVITYRESPONSE']._serialized_end=621 - _globals['_WORKFLOWREQUEST']._serialized_start=624 - _globals['_WORKFLOWREQUEST']._serialized_end=866 - _globals['_WORKFLOWRESPONSE']._serialized_start=869 - _globals['_WORKFLOWRESPONSE']._serialized_end=1127 - _globals['_CREATEINSTANCEREQUEST']._serialized_start=1130 - _globals['_CREATEINSTANCEREQUEST']._serialized_end=1561 - _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._serialized_start=1484 - _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._serialized_end=1527 - _globals['_CREATEINSTANCERESPONSE']._serialized_start=1563 - _globals['_CREATEINSTANCERESPONSE']._serialized_end=1607 - _globals['_GETINSTANCEREQUEST']._serialized_start=1609 - _globals['_GETINSTANCEREQUEST']._serialized_end=1678 - _globals['_GETINSTANCERESPONSE']._serialized_start=1680 - _globals['_GETINSTANCERESPONSE']._serialized_end=1756 - _globals['_RAISEEVENTREQUEST']._serialized_start=1758 - _globals['_RAISEEVENTREQUEST']._serialized_end=1856 - _globals['_RAISEEVENTRESPONSE']._serialized_start=1858 - _globals['_RAISEEVENTRESPONSE']._serialized_end=1878 - _globals['_TERMINATEREQUEST']._serialized_start=1880 - _globals['_TERMINATEREQUEST']._serialized_end=1983 - 
_globals['_TERMINATERESPONSE']._serialized_start=1985 - _globals['_TERMINATERESPONSE']._serialized_end=2004 - _globals['_SUSPENDREQUEST']._serialized_start=2006 - _globals['_SUSPENDREQUEST']._serialized_end=2088 - _globals['_SUSPENDRESPONSE']._serialized_start=2090 - _globals['_SUSPENDRESPONSE']._serialized_end=2107 - _globals['_RESUMEREQUEST']._serialized_start=2109 - _globals['_RESUMEREQUEST']._serialized_end=2190 - _globals['_RESUMERESPONSE']._serialized_start=2192 - _globals['_RESUMERESPONSE']._serialized_end=2208 - _globals['_PURGEINSTANCESREQUEST']._serialized_start=2211 - _globals['_PURGEINSTANCESREQUEST']._serialized_end=2369 - _globals['_PURGEINSTANCEFILTER']._serialized_start=2372 - _globals['_PURGEINSTANCEFILTER']._serialized_end=2542 - _globals['_PURGEINSTANCESRESPONSE']._serialized_start=2544 - _globals['_PURGEINSTANCESRESPONSE']._serialized_end=2646 - _globals['_GETWORKITEMSREQUEST']._serialized_start=2648 - _globals['_GETWORKITEMSREQUEST']._serialized_end=2693 - _globals['_WORKITEM']._serialized_start=2696 - _globals['_WORKITEM']._serialized_end=2850 - _globals['_COMPLETETASKRESPONSE']._serialized_start=2852 - _globals['_COMPLETETASKRESPONSE']._serialized_end=2874 - _globals['_RERUNWORKFLOWFROMEVENTREQUEST']._serialized_start=2877 - _globals['_RERUNWORKFLOWFROMEVENTREQUEST']._serialized_end=3138 - _globals['_RERUNWORKFLOWFROMEVENTRESPONSE']._serialized_start=3140 - _globals['_RERUNWORKFLOWFROMEVENTRESPONSE']._serialized_end=3195 - _globals['_LISTINSTANCEIDSREQUEST']._serialized_start=3197 - _globals['_LISTINSTANCEIDSREQUEST']._serialized_end=3311 - _globals['_LISTINSTANCEIDSRESPONSE']._serialized_start=3313 - _globals['_LISTINSTANCEIDSRESPONSE']._serialized_end=3413 - _globals['_GETINSTANCEHISTORYREQUEST']._serialized_start=3415 - _globals['_GETINSTANCEHISTORYREQUEST']._serialized_end=3462 - _globals['_GETINSTANCEHISTORYRESPONSE']._serialized_start=3464 - _globals['_GETINSTANCEHISTORYRESPONSE']._serialized_end=3523 - 
_globals['_TASKHUBSIDECARSERVICE']._serialized_start=3622 - _globals['_TASKHUBSIDECARSERVICE']._serialized_end=4750 + _globals['_ACTIVITYREQUEST']._serialized_end=522 + _globals['_ACTIVITYRESPONSE']._serialized_start=525 + _globals['_ACTIVITYRESPONSE']._serialized_end=695 + _globals['_WORKFLOWREQUEST']._serialized_start=698 + _globals['_WORKFLOWREQUEST']._serialized_end=1014 + _globals['_WORKFLOWRESPONSE']._serialized_start=1017 + _globals['_WORKFLOWRESPONSE']._serialized_end=1275 + _globals['_CREATEINSTANCEREQUEST']._serialized_start=1278 + _globals['_CREATEINSTANCEREQUEST']._serialized_end=1709 + _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._serialized_start=1632 + _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._serialized_end=1675 + _globals['_CREATEINSTANCERESPONSE']._serialized_start=1711 + _globals['_CREATEINSTANCERESPONSE']._serialized_end=1755 + _globals['_GETINSTANCEREQUEST']._serialized_start=1757 + _globals['_GETINSTANCEREQUEST']._serialized_end=1826 + _globals['_GETINSTANCERESPONSE']._serialized_start=1828 + _globals['_GETINSTANCERESPONSE']._serialized_end=1904 + _globals['_RAISEEVENTREQUEST']._serialized_start=1906 + _globals['_RAISEEVENTREQUEST']._serialized_end=2004 + _globals['_RAISEEVENTRESPONSE']._serialized_start=2006 + _globals['_RAISEEVENTRESPONSE']._serialized_end=2026 + _globals['_TERMINATEREQUEST']._serialized_start=2028 + _globals['_TERMINATEREQUEST']._serialized_end=2131 + _globals['_TERMINATERESPONSE']._serialized_start=2133 + _globals['_TERMINATERESPONSE']._serialized_end=2152 + _globals['_SUSPENDREQUEST']._serialized_start=2154 + _globals['_SUSPENDREQUEST']._serialized_end=2236 + _globals['_SUSPENDRESPONSE']._serialized_start=2238 + _globals['_SUSPENDRESPONSE']._serialized_end=2255 + _globals['_RESUMEREQUEST']._serialized_start=2257 + _globals['_RESUMEREQUEST']._serialized_end=2338 + _globals['_RESUMERESPONSE']._serialized_start=2340 + _globals['_RESUMERESPONSE']._serialized_end=2356 + 
_globals['_PURGEINSTANCESREQUEST']._serialized_start=2359 + _globals['_PURGEINSTANCESREQUEST']._serialized_end=2517 + _globals['_PURGEINSTANCEFILTER']._serialized_start=2520 + _globals['_PURGEINSTANCEFILTER']._serialized_end=2690 + _globals['_PURGEINSTANCESRESPONSE']._serialized_start=2692 + _globals['_PURGEINSTANCESRESPONSE']._serialized_end=2794 + _globals['_GETWORKITEMSREQUEST']._serialized_start=2796 + _globals['_GETWORKITEMSREQUEST']._serialized_end=2841 + _globals['_WORKITEM']._serialized_start=2844 + _globals['_WORKITEM']._serialized_end=2998 + _globals['_COMPLETETASKRESPONSE']._serialized_start=3000 + _globals['_COMPLETETASKRESPONSE']._serialized_end=3022 + _globals['_RERUNWORKFLOWFROMEVENTREQUEST']._serialized_start=3025 + _globals['_RERUNWORKFLOWFROMEVENTREQUEST']._serialized_end=3286 + _globals['_RERUNWORKFLOWFROMEVENTRESPONSE']._serialized_start=3288 + _globals['_RERUNWORKFLOWFROMEVENTRESPONSE']._serialized_end=3343 + _globals['_LISTINSTANCEIDSREQUEST']._serialized_start=3345 + _globals['_LISTINSTANCEIDSREQUEST']._serialized_end=3459 + _globals['_LISTINSTANCEIDSRESPONSE']._serialized_start=3461 + _globals['_LISTINSTANCEIDSRESPONSE']._serialized_end=3561 + _globals['_GETINSTANCEHISTORYREQUEST']._serialized_start=3563 + _globals['_GETINSTANCEHISTORYREQUEST']._serialized_end=3610 + _globals['_GETINSTANCEHISTORYRESPONSE']._serialized_start=3612 + _globals['_GETINSTANCEHISTORYRESPONSE']._serialized_end=3671 + _globals['_TASKHUBSIDECARSERVICE']._serialized_start=3770 + _globals['_TASKHUBSIDECARSERVICE']._serialized_end=4898 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.pyi index 8786348e..7fa9ffde 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.pyi +++ 
b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.pyi @@ -62,6 +62,7 @@ class ActivityRequest(_message.Message): TASKID_FIELD_NUMBER: _builtins.int PARENTTRACECONTEXT_FIELD_NUMBER: _builtins.int TASKEXECUTIONID_FIELD_NUMBER: _builtins.int + PROPAGATEDHISTORY_FIELD_NUMBER: _builtins.int name: _builtins.str taskId: _builtins.int taskExecutionId: _builtins.str @@ -73,6 +74,13 @@ class ActivityRequest(_message.Message): def workflowInstance(self) -> _orchestration_pb2.WorkflowInstance: ... @_builtins.property def parentTraceContext(self) -> _orchestration_pb2.TraceContext: ... + @_builtins.property + def propagatedHistory(self) -> _history_events_pb2.PropagatedHistory: + """Propagated history from the calling workflow. + Delivered via the work item stream to the SDK, so that the + activity function can access it via ctx. + """ + def __init__( self, *, @@ -83,11 +91,15 @@ class ActivityRequest(_message.Message): taskId: _builtins.int = ..., parentTraceContext: _orchestration_pb2.TraceContext | None = ..., taskExecutionId: _builtins.str = ..., + propagatedHistory: _history_events_pb2.PropagatedHistory | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "parentTraceContext", b"parentTraceContext", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "input", b"input", "parentTraceContext", b"parentTraceContext", "propagatedHistory", b"propagatedHistory", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
- _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "name", b"name", "parentTraceContext", b"parentTraceContext", "taskExecutionId", b"taskExecutionId", "taskId", b"taskId", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "input", b"input", "name", b"name", "parentTraceContext", b"parentTraceContext", "propagatedHistory", b"propagatedHistory", "taskExecutionId", b"taskExecutionId", "taskId", b"taskId", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__propagatedHistory: _TypeAlias = _typing.Literal["propagatedHistory"] # noqa: Y015 + _WhichOneofArgType__propagatedHistory: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory"] # noqa: Y015 + def WhichOneof(self, oneof_group: _WhichOneofArgType__propagatedHistory) -> _WhichOneofReturnType__propagatedHistory | None: ... Global___ActivityRequest: _TypeAlias = ActivityRequest # noqa: Y015 @@ -133,6 +145,7 @@ class WorkflowRequest(_message.Message): NEWEVENTS_FIELD_NUMBER: _builtins.int REQUIRESHISTORYSTREAMING_FIELD_NUMBER: _builtins.int ROUTER_FIELD_NUMBER: _builtins.int + PROPAGATEDHISTORY_FIELD_NUMBER: _builtins.int instanceId: _builtins.str requiresHistoryStreaming: _builtins.bool @_builtins.property @@ -143,6 +156,13 @@ class WorkflowRequest(_message.Message): def newEvents(self) -> _containers.RepeatedCompositeFieldContainer[_history_events_pb2.HistoryEvent]: ... @_builtins.property def router(self) -> _orchestration_pb2.TaskRouter: ... + @_builtins.property + def propagatedHistory(self) -> _history_events_pb2.PropagatedHistory: + """Propagated history from a parent workflow. + Delivered via the work item stream to the SDK, so that the + workflow function can access it via ctx. 
+ """ + def __init__( self, *, @@ -152,13 +172,19 @@ class WorkflowRequest(_message.Message): newEvents: _abc.Iterable[_history_events_pb2.HistoryEvent] | None = ..., requiresHistoryStreaming: _builtins.bool = ..., router: _orchestration_pb2.TaskRouter | None = ..., + propagatedHistory: _history_events_pb2.PropagatedHistory | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "executionId", b"executionId", "router", b"router"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "_router", b"_router", "executionId", b"executionId", "propagatedHistory", b"propagatedHistory", "router", b"router"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "executionId", b"executionId", "instanceId", b"instanceId", "newEvents", b"newEvents", "pastEvents", b"pastEvents", "requiresHistoryStreaming", b"requiresHistoryStreaming", "router", b"router"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "_router", b"_router", "executionId", b"executionId", "instanceId", b"instanceId", "newEvents", b"newEvents", "pastEvents", b"pastEvents", "propagatedHistory", b"propagatedHistory", "requiresHistoryStreaming", b"requiresHistoryStreaming", "router", b"router"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
+ _WhichOneofReturnType__propagatedHistory: _TypeAlias = _typing.Literal["propagatedHistory"] # noqa: Y015 + _WhichOneofArgType__propagatedHistory: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory"] # noqa: Y015 _WhichOneofReturnType__router: _TypeAlias = _typing.Literal["router"] # noqa: Y015 _WhichOneofArgType__router: _TypeAlias = _typing.Literal["_router", b"_router"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__propagatedHistory) -> _WhichOneofReturnType__propagatedHistory | None: ... + @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__router) -> _WhichOneofReturnType__router | None: ... Global___WorkflowRequest: _TypeAlias = WorkflowRequest # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.py index 4e1561f4..645e902d 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.py @@ -28,7 +28,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13runtime_state.proto\x12\x1d\x64urabletask.protos.backend.v1\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"_\n\x13RuntimeStateStalled\x12\x1e\n\x06reason\x18\x01 \x01(\x0e\x32\x0e.StalledReason\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_description\"\xbc\x05\n\x14WorkflowRuntimeState\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12 \n\tnewEvents\x18\x02 \x03(\x0b\x32\r.HistoryEvent\x12 \n\toldEvents\x18\x03 \x03(\x0b\x32\r.HistoryEvent\x12#\n\x0cpendingTasks\x18\x04 \x03(\x0b\x32\r.HistoryEvent\x12$\n\rpendingTimers\x18\x05 
\x03(\x0b\x32\r.HistoryEvent\x12S\n\x0fpendingMessages\x18\x06 \x03(\x0b\x32:.durabletask.protos.backend.v1.WorkflowRuntimeStateMessage\x12*\n\nstartEvent\x18\x07 \x01(\x0b\x32\x16.ExecutionStartedEvent\x12\x30\n\x0e\x63ompletedEvent\x18\x08 \x01(\x0b\x32\x18.ExecutionCompletedEvent\x12/\n\x0b\x63reatedTime\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x0flastUpdatedTime\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rcompletedTime\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x16\n\x0e\x63ontinuedAsNew\x18\x0c \x01(\x08\x12\x13\n\x0bisSuspended\x18\r \x01(\x08\x12\x32\n\x0c\x63ustomStatus\x18\x0e \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12H\n\x07stalled\x18\x0f \x01(\x0b\x32\x32.durabletask.protos.backend.v1.RuntimeStateStalledH\x00\x88\x01\x01\x42\n\n\x08_stalled\"\\\n\x1bWorkflowRuntimeStateMessage\x12#\n\x0chistoryEvent\x18\x01 \x01(\x0b\x32\r.HistoryEvent\x12\x18\n\x10targetInstanceId\x18\x02 \x01(\tBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13runtime_state.proto\x12\x1d\x64urabletask.protos.backend.v1\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"_\n\x13RuntimeStateStalled\x12\x1e\n\x06reason\x18\x01 \x01(\x0e\x32\x0e.StalledReason\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_description\"\xbc\x05\n\x14WorkflowRuntimeState\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12 \n\tnewEvents\x18\x02 \x03(\x0b\x32\r.HistoryEvent\x12 \n\toldEvents\x18\x03 \x03(\x0b\x32\r.HistoryEvent\x12#\n\x0cpendingTasks\x18\x04 \x03(\x0b\x32\r.HistoryEvent\x12$\n\rpendingTimers\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12S\n\x0fpendingMessages\x18\x06 \x03(\x0b\x32:.durabletask.protos.backend.v1.WorkflowRuntimeStateMessage\x12*\n\nstartEvent\x18\x07 
\x01(\x0b\x32\x16.ExecutionStartedEvent\x12\x30\n\x0e\x63ompletedEvent\x18\x08 \x01(\x0b\x32\x18.ExecutionCompletedEvent\x12/\n\x0b\x63reatedTime\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x0flastUpdatedTime\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rcompletedTime\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x16\n\x0e\x63ontinuedAsNew\x18\x0c \x01(\x08\x12\x13\n\x0bisSuspended\x18\r \x01(\x08\x12\x32\n\x0c\x63ustomStatus\x18\x0e \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12H\n\x07stalled\x18\x0f \x01(\x0b\x32\x32.durabletask.protos.backend.v1.RuntimeStateStalledH\x00\x88\x01\x01\x42\n\n\x08_stalled\"\xa6\x01\n\x1bWorkflowRuntimeStateMessage\x12#\n\x0chistoryEvent\x18\x01 \x01(\x0b\x32\r.HistoryEvent\x12\x18\n\x10targetInstanceId\x18\x02 \x01(\t\x12\x32\n\x11propagatedHistory\x18\x03 \x01(\x0b\x32\x12.PropagatedHistoryH\x00\x88\x01\x01\x42\x14\n\x12_propagatedHistoryBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -40,6 +40,6 @@ _globals['_RUNTIMESTATESTALLED']._serialized_end=257 _globals['_WORKFLOWRUNTIMESTATE']._serialized_start=260 _globals['_WORKFLOWRUNTIMESTATE']._serialized_end=960 - _globals['_WORKFLOWRUNTIMESTATEMESSAGE']._serialized_start=962 - _globals['_WORKFLOWRUNTIMESTATEMESSAGE']._serialized_end=1054 + _globals['_WORKFLOWRUNTIMESTATEMESSAGE']._serialized_start=963 + _globals['_WORKFLOWRUNTIMESTATEMESSAGE']._serialized_end=1129 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.pyi index fd0b75f8..6f41ceee 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.pyi +++ 
b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.pyi @@ -142,18 +142,31 @@ class WorkflowRuntimeStateMessage(_message.Message): HISTORYEVENT_FIELD_NUMBER: _builtins.int TARGETINSTANCEID_FIELD_NUMBER: _builtins.int + PROPAGATEDHISTORY_FIELD_NUMBER: _builtins.int targetInstanceId: _builtins.str @_builtins.property def historyEvent(self) -> _history_events_pb2.HistoryEvent: ... + @_builtins.property + def propagatedHistory(self) -> _history_events_pb2.PropagatedHistory: + """Propagated history to deliver to the child workflow. + This is a transport field used when creating child workflows with + history propagation enabled. It is NOT stored as part of any + workflow's history events. + """ + def __init__( self, *, historyEvent: _history_events_pb2.HistoryEvent | None = ..., targetInstanceId: _builtins.str = ..., + propagatedHistory: _history_events_pb2.PropagatedHistory | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["historyEvent", b"historyEvent"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "historyEvent", b"historyEvent", "propagatedHistory", b"propagatedHistory"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["historyEvent", b"historyEvent", "targetInstanceId", b"targetInstanceId"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "historyEvent", b"historyEvent", "propagatedHistory", b"propagatedHistory", "targetInstanceId", b"targetInstanceId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
+ _WhichOneofReturnType__propagatedHistory: _TypeAlias = _typing.Literal["propagatedHistory"] # noqa: Y015 + _WhichOneofArgType__propagatedHistory: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory"] # noqa: Y015 + def WhichOneof(self, oneof_group: _WhichOneofArgType__propagatedHistory) -> _WhichOneofReturnType__propagatedHistory | None: ... Global___WorkflowRuntimeStateMessage: _TypeAlias = WorkflowRuntimeStateMessage # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/task.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/task.py index 82bf062a..21bebfab 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/task.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/task.py @@ -19,6 +19,7 @@ import dapr.ext.workflow._durabletask.internal.helpers as pbh import dapr.ext.workflow._durabletask.internal.protos as pb +from dapr.ext.workflow.propagation import PropagatedHistory, PropagationScope T = TypeVar('T') TInput = TypeVar('TInput') @@ -112,6 +113,7 @@ def call_activity( input: Optional[TInput] = None, retry_policy: Optional[RetryPolicy] = None, app_id: Optional[str] = None, + propagation: Optional[PropagationScope] = None, ) -> Task[TOutput]: """Schedule an activity for execution. @@ -125,6 +127,11 @@ def call_activity( The retry policy to use for this activity call. app_id: Optional[str] The app ID that will execute the activity. If not specified, the activity will be executed by the same app as the orchestrator. + propagation: Optional[PropagationScope] + Controls whether this workflow's history is propagated to the activity. + ``None`` (default) propagates nothing. ``OWN_HISTORY`` sends this + workflow's own events; ``LINEAGE`` additionally forwards any history + this workflow itself received from its parent. 
Returns ------- @@ -142,6 +149,7 @@ def call_sub_orchestrator( instance_id: Optional[str] = None, retry_policy: Optional[RetryPolicy] = None, app_id: Optional[str] = None, + propagation: Optional[PropagationScope] = None, ) -> Task[TOutput]: """Schedule sub-orchestrator function for execution. @@ -158,6 +166,11 @@ def call_sub_orchestrator( The retry policy to use for this sub-orchestrator call. app_id: Optional[str] The app ID that will execute the sub-orchestrator. If not specified, the sub-orchestrator will be executed by the same app as the orchestrator. + propagation: Optional[PropagationScope] + Controls whether this workflow's history is propagated to the child + workflow. ``None`` (default) propagates nothing. ``OWN_HISTORY`` + sends this workflow's own events; ``LINEAGE`` additionally forwards + any history this workflow itself received from its parent. Returns ------- @@ -227,6 +240,19 @@ def is_patched(self, patch_name: str) -> bool: """ pass + @abstractmethod + def get_propagated_history(self) -> Optional[PropagatedHistory]: + """Return history propagated from a parent workflow, or ``None`` if + no history was propagated. + + Propagated history is populated when a parent workflow calls this + workflow with a propagation scope (``OWN_HISTORY`` or ``LINEAGE``) + and the runtime has propagation enabled. The result is a structured + view of the caller's recorded events; use it to introspect upstream + activities or child workflows before deciding what to do. 
+ """ + pass + class FailureDetails: def __init__(self, message: str, error_type: str, stack_trace: Optional[str]): @@ -445,6 +471,7 @@ def __init__( task_execution_id: str = '', instance_id: Optional[str] = None, app_id: Optional[str] = None, + propagation: Optional[PropagationScope] = None, ) -> None: super().__init__() self._retry_policy = retry_policy @@ -456,6 +483,7 @@ def __init__( self._task_execution_id = task_execution_id self._instance_id = instance_id self._app_id = app_id + self._propagation = propagation def increment_attempt_count(self) -> None: self._attempt_count += 1 @@ -587,10 +615,17 @@ def when_any(tasks: list[Task]) -> WhenAnyTask: class ActivityContext: - def __init__(self, orchestration_id: str, task_id: int, task_execution_id: str = ''): + def __init__( + self, + orchestration_id: str, + task_id: int, + task_execution_id: str = '', + propagated_history: Optional[PropagatedHistory] = None, + ): self._orchestration_id = orchestration_id self._task_id = task_id self._task_execution_id = task_execution_id + self._propagated_history = propagated_history @property def orchestration_id(self) -> str: @@ -634,6 +669,11 @@ def task_execution_id(self) -> str: """ return self._task_execution_id + def get_propagated_history(self) -> Optional[PropagatedHistory]: + """Return history propagated from the calling workflow, or ``None`` if + the caller did not opt in to history propagation.""" + return self._propagated_history + # Orchestrators are generators that yield tasks and receive/return any type Orchestrator = Callable[[OrchestrationContext, TInput], Union[Generator[Task, Any, Any], TOutput]] diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/worker.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/worker.py index b76d07d6..84663064 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/worker.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/worker.py @@ -30,6 +30,7 @@ import grpc from 
dapr.ext.workflow._durabletask import deterministic, task from dapr.ext.workflow._durabletask.internal.grpc_interceptor import DefaultClientInterceptorImpl +from dapr.ext.workflow.propagation import PropagatedHistory, PropagationScope from google.protobuf import empty_pb2, timestamp_pb2 TInput = TypeVar('TInput') @@ -876,7 +877,14 @@ def _execute_orchestrator( ): try: executor = _OrchestrationExecutor(self._registry, self._logger) - result = executor.execute(req.instanceId, req.pastEvents, req.newEvents) + propagated = ( + PropagatedHistory.from_proto(req.propagatedHistory) + if req.HasField('propagatedHistory') + else None + ) + result = executor.execute( + req.instanceId, req.pastEvents, req.newEvents, propagated_history=propagated + ) version = None if result.version_name: @@ -982,8 +990,18 @@ def _execute_activity( with span_context: try: executor = _ActivityExecutor(self._registry, self._logger) + propagated = ( + PropagatedHistory.from_proto(req.propagatedHistory) + if req.HasField('propagatedHistory') + else None + ) result = executor.execute( - instance_id, req.name, req.taskId, req.input.value, req.taskExecutionId + instance_id, + req.name, + req.taskId, + req.input.value, + req.taskExecutionId, + propagated_history=propagated, ) res = pb.ActivityResponse( instanceId=instance_id, @@ -1070,6 +1088,13 @@ def __init__(self, instance_id: str): self._history_patches: dict[str, bool] = {} self._applied_patches: dict[str, bool] = {} self._encountered_patches: list[str] = [] + self._propagated_history: Optional[PropagatedHistory] = None + + def set_propagated_history(self, history: Optional[PropagatedHistory]) -> None: + self._propagated_history = history + + def get_propagated_history(self) -> Optional[PropagatedHistory]: + return self._propagated_history def run(self, generator: Generator[task.Task, Any, Any]): self._generator = generator @@ -1294,6 +1319,7 @@ def call_activity( input: Optional[TInput] = None, retry_policy: Optional[task.RetryPolicy] = None, 
app_id: Optional[str] = None, + propagation: Optional[PropagationScope] = None, ) -> task.Task[TOutput]: id = self.next_sequence_number() task_execution_id = str(self.new_guid()) @@ -1306,6 +1332,7 @@ def call_activity( is_sub_orch=False, app_id=app_id, task_execution_id=task_execution_id, + propagation=propagation, ) return self._pending_tasks.get(id, task.CompletableTask()) @@ -1317,6 +1344,7 @@ def call_sub_orchestrator( instance_id: Optional[str] = None, retry_policy: Optional[task.RetryPolicy] = None, app_id: Optional[str] = None, + propagation: Optional[PropagationScope] = None, ) -> task.Task[TOutput]: id = self.next_sequence_number() if isinstance(orchestrator, str): @@ -1331,6 +1359,7 @@ def call_sub_orchestrator( is_sub_orch=True, instance_id=instance_id, app_id=app_id, + propagation=propagation, ) return self._pending_tasks.get(id, task.CompletableTask()) @@ -1346,6 +1375,7 @@ def call_activity_function_helper( fn_task: Optional[task.CompletableTask[TOutput]] = None, app_id: Optional[str] = None, task_execution_id: str = '', + propagation: Optional[PropagationScope] = None, ): if id is None: id = self.next_sequence_number() @@ -1361,6 +1391,7 @@ def call_activity_function_helper( else: # When retrying, input is already encoded as a string (or None). 
encoded_input = str(input) if input is not None else None + propagation_scope = propagation.value if propagation is not None else None if not is_sub_orch: name = ( activity_function @@ -1368,7 +1399,12 @@ def call_activity_function_helper( else task.get_name(activity_function) ) action = ph.new_schedule_task_action( - id, name, encoded_input, router, task_execution_id=task_execution_id + id, + name, + encoded_input, + router, + task_execution_id=task_execution_id, + propagation_scope=propagation_scope, ) else: if instance_id is None: @@ -1377,7 +1413,12 @@ def call_activity_function_helper( if not isinstance(activity_function, str): raise ValueError('Orchestrator function name must be a string') action = ph.new_create_child_workflow_action( - id, activity_function, instance_id, encoded_input, router + id, + activity_function, + instance_id, + encoded_input, + router, + propagation_scope=propagation_scope, ) self._pending_actions[id] = action @@ -1394,6 +1435,7 @@ def call_activity_function_helper( task_execution_id=task_execution_id, instance_id=instance_id, app_id=app_id, + propagation=propagation, ) self._pending_tasks[id] = fn_task @@ -1541,6 +1583,7 @@ def execute( instance_id: str, old_events: Sequence[pb.HistoryEvent], new_events: Sequence[pb.HistoryEvent], + propagated_history: Optional[PropagatedHistory] = None, ) -> ExecutionResults: if not new_events: raise task.WorkflowStateError( @@ -1548,6 +1591,7 @@ def execute( ) ctx = _RuntimeOrchestrationContext(instance_id) + ctx.set_propagated_history(propagated_history) try: # Rebuild local state by replaying old history into the orchestrator function self._logger.debug( @@ -1710,6 +1754,7 @@ def process_event(self, ctx: _RuntimeOrchestrationContext, event: pb.HistoryEven fn_task=retryable, app_id=retryable._app_id, task_execution_id=retryable._task_execution_id, + propagation=retryable._propagation, ) else: ctx.resume() @@ -1964,6 +2009,7 @@ def execute( task_id: int, encoded_input: Optional[str], 
task_execution_id: str = '', + propagated_history: Optional[PropagatedHistory] = None, ) -> Optional[str]: """Executes an activity function and returns the serialized result, if any.""" self._logger.debug(f"{orchestration_id}/{task_id}: Executing activity '{name}'...") @@ -1974,7 +2020,12 @@ def execute( ) activity_input = shared.from_json(encoded_input) if encoded_input else None - ctx = task.ActivityContext(orchestration_id, task_id, task_execution_id) + ctx = task.ActivityContext( + orchestration_id, + task_id, + task_execution_id, + propagated_history=propagated_history, + ) # Execute the activity function activity_output = fn(ctx, activity_input) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py b/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py index 1f9f6417..fb97bb66 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py @@ -18,6 +18,7 @@ from dapr.ext.workflow._durabletask import task from dapr.ext.workflow.logger import Logger, LoggerOptions +from dapr.ext.workflow.propagation import PropagatedHistory, PropagationScope from dapr.ext.workflow.retry_policy import RetryPolicy from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext from dapr.ext.workflow.workflow_context import Workflow, WorkflowContext @@ -67,7 +68,9 @@ def call_activity( input: TInput = None, retry_policy: Optional[RetryPolicy] = None, app_id: Optional[str] = None, + propagation: Optional[PropagationScope] = None, ) -> task.Task[TOutput]: + retry_obj = retry_policy.obj if retry_policy is not None else None # Handle string activity names for multi-app workflow scenarios if isinstance(activity, str): activity_name = activity @@ -77,11 +80,12 @@ def call_activity( ) else: self._logger.debug(f'{self.instance_id}: Creating activity {activity_name}') - - if retry_policy is None: - return self.__obj.call_activity(activity=activity_name, 
input=input, app_id=app_id) return self.__obj.call_activity( - activity=activity_name, input=input, retry_policy=retry_policy.obj, app_id=app_id + activity=activity_name, + input=input, + retry_policy=retry_obj, + app_id=app_id, + propagation=propagation, ) # Handle function activity objects (original behavior) @@ -91,10 +95,12 @@ def call_activity( else: # this case should ideally never happen act = activity.__name__ - if retry_policy is None: - return self.__obj.call_activity(activity=act, input=input, app_id=app_id) return self.__obj.call_activity( - activity=act, input=input, retry_policy=retry_policy.obj, app_id=app_id + activity=act, + input=input, + retry_policy=retry_obj, + app_id=app_id, + propagation=propagation, ) def call_child_workflow( @@ -105,22 +111,20 @@ def call_child_workflow( instance_id: Optional[str] = None, retry_policy: Optional[RetryPolicy] = None, app_id: Optional[str] = None, + propagation: Optional[PropagationScope] = None, ) -> task.Task[TOutput]: + retry_obj = retry_policy.obj if retry_policy is not None else None # Handle string workflow names for multi-app workflow scenarios if isinstance(workflow, str): workflow_name = workflow self._logger.debug(f'{self.instance_id}: Creating child workflow {workflow_name}') - - if retry_policy is None: - return self.__obj.call_sub_orchestrator( - workflow_name, input=input, instance_id=instance_id, app_id=app_id - ) return self.__obj.call_sub_orchestrator( workflow_name, input=input, instance_id=instance_id, - retry_policy=retry_policy.obj, + retry_policy=retry_obj, app_id=app_id, + propagation=propagation, ) # Handle function workflow objects (original behavior) @@ -137,14 +141,18 @@ def wf(ctx: task.OrchestrationContext, inp: TInput): else: # this case should ideally never happen wf.__name__ = workflow.__name__ - if retry_policy is None: - return self.__obj.call_sub_orchestrator( - wf, input=input, instance_id=instance_id, app_id=app_id - ) return self.__obj.call_sub_orchestrator( - wf, 
input=input, instance_id=instance_id, retry_policy=retry_policy.obj, app_id=app_id + wf, + input=input, + instance_id=instance_id, + retry_policy=retry_obj, + app_id=app_id, + propagation=propagation, ) + def get_propagated_history(self) -> Optional[PropagatedHistory]: + return self.__obj.get_propagated_history() + def wait_for_external_event( self, name: str, diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/propagation.py b/ext/dapr-ext-workflow/dapr/ext/workflow/propagation.py new file mode 100644 index 00000000..b9f023b9 --- /dev/null +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/propagation.py @@ -0,0 +1,349 @@ +# -*- coding: utf-8 -*- +# Copyright 2026 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Workflow history propagation types. + +Public surface for the propagation feature: the :class:`PropagationScope` +enum used when scheduling activities or child workflows, plus the +:class:`PropagatedHistory` query API exposed on the receiving side via +:meth:`WorkflowContext.get_propagated_history` and +:meth:`WorkflowActivityContext.get_propagated_history`. +""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from enum import Enum +from typing import Optional + +import dapr.ext.workflow._durabletask.internal.helpers as pbh +import dapr.ext.workflow._durabletask.internal.protos as pb + + +class PropagationScope(Enum): + """Controls how a parent workflow's history is propagated to children. 
+ + Values map 1:1 to the protobuf ``HistoryPropagationScope`` enum; the + plumbing layer reads ``.value`` when writing to proto fields. + """ + + NONE = int(pb.HISTORY_PROPAGATION_SCOPE_NONE) + OWN_HISTORY = int(pb.HISTORY_PROPAGATION_SCOPE_OWN_HISTORY) + LINEAGE = int(pb.HISTORY_PROPAGATION_SCOPE_LINEAGE) + + +class PropagationNotFoundError(Exception): + """Raised when a query against propagated history finds no match.""" + + +@dataclass(frozen=True) +class ActivityResult: + """A reconstructed view of a single activity invocation from propagated history. + + ``input``/``output`` are the JSON-encoded string payloads or ``None`` when + unset. ``error`` is ``None`` unless the activity failed. + """ + + name: str + started: bool + completed: bool + failed: bool + input: Optional[str] + output: Optional[str] + error: Optional[pb.TaskFailureDetails] + + +@dataclass(frozen=True) +class ChildWorkflowResult: + """A reconstructed view of a single child workflow invocation.""" + + name: str + started: bool + completed: bool + failed: bool + output: Optional[str] + error: Optional[pb.TaskFailureDetails] + + +def _string_value_or_none(sv: Optional[pb.wrappers_pb2.StringValue]) -> Optional[str]: + if sv is None or pbh.is_empty(sv): + return None + return sv.value + + +def _resolve_activity( + events: list[pb.HistoryEvent], schedule_event: pb.HistoryEvent +) -> ActivityResult: + """Build an ActivityResult by matching TaskCompleted/TaskFailed against the + given TaskScheduled event's eventId. 
SDK retries reuse taskExecutionId, so + we match on the scheduling event ID instead.""" + ts = schedule_event.taskScheduled + schedule_id = schedule_event.eventId + completed = False + failed = False + output: Optional[str] = None + error: Optional[pb.TaskFailureDetails] = None + for e in events: + if e.HasField('taskCompleted') and e.taskCompleted.taskScheduledId == schedule_id: + completed = True + output = _string_value_or_none(e.taskCompleted.result) + elif e.HasField('taskFailed') and e.taskFailed.taskScheduledId == schedule_id: + failed = True + error = e.taskFailed.failureDetails + return ActivityResult( + name=ts.name, + started=True, + completed=completed, + failed=failed, + input=_string_value_or_none(ts.input), + output=output, + error=error, + ) + + +def _resolve_child_workflow( + events: list[pb.HistoryEvent], creation_event_id: int, name: str +) -> ChildWorkflowResult: + completed = False + failed = False + output: Optional[str] = None + error: Optional[pb.TaskFailureDetails] = None + for e in events: + if ( + e.HasField('childWorkflowInstanceCompleted') + and e.childWorkflowInstanceCompleted.taskScheduledId == creation_event_id + ): + completed = True + output = _string_value_or_none(e.childWorkflowInstanceCompleted.result) + elif ( + e.HasField('childWorkflowInstanceFailed') + and e.childWorkflowInstanceFailed.taskScheduledId == creation_event_id + ): + failed = True + error = e.childWorkflowInstanceFailed.failureDetails + return ChildWorkflowResult( + name=name, + started=True, + completed=completed, + failed=failed, + output=output, + error=error, + ) + + +@dataclass(frozen=True) +class WorkflowResult: + """A scoped view of a single workflow's chunk in propagated history. + + Use :meth:`get_activity_by_name` / :meth:`get_child_workflow_by_name` + to query specific items inside this chunk. Methods return the most-recent + occurrence by execution order, matching the Go SDK semantics. 
+ """ + + instance_id: str + app_id: str + name: str + _events: list[pb.HistoryEvent] = field(repr=False) + + def get_activities_by_name(self, name: str) -> list[ActivityResult]: + """Return every activity in this chunk whose scheduled name matches, in + execution order. Empty list if none.""" + return [ + _resolve_activity(self._events, e) + for e in self._events + if e.HasField('taskScheduled') and e.taskScheduled.name == name + ] + + def get_activity_by_name(self, name: str) -> ActivityResult: + """Return the most recent activity in this chunk whose name matches. + + Raises :class:`PropagationNotFoundError` if no activity scheduled with + ``name`` is present. + """ + all_results = self.get_activities_by_name(name) + if not all_results: + raise PropagationNotFoundError( + f'no activity named {name!r} in propagated history for workflow {self.name!r}' + ) + return all_results[-1] + + def get_child_workflows_by_name(self, name: str) -> list[ChildWorkflowResult]: + """Return every child workflow in this chunk whose name matches, in + execution order.""" + return [ + _resolve_child_workflow(self._events, e.eventId, name) + for e in self._events + if e.HasField('childWorkflowInstanceCreated') + and e.childWorkflowInstanceCreated.name == name + ] + + def get_child_workflow_by_name(self, name: str) -> ChildWorkflowResult: + """Return the most recent child workflow in this chunk whose name matches. + + Raises :class:`PropagationNotFoundError` if no match is found. + """ + all_results = self.get_child_workflows_by_name(name) + if not all_results: + raise PropagationNotFoundError( + f'no child workflow named {name!r} in propagated history for workflow {self.name!r}' + ) + return all_results[-1] + + +@dataclass(frozen=True) +class _HistoryChunk: + app_id: str + instance_id: str + workflow_name: str + start_event_index: int + event_count: int + + +class PropagatedHistory: + """History propagated from a parent workflow to a child workflow or activity. 
+ + A propagated history is composed of one or more chunks, each owned by a + distinct workflow instance. Chunks preserve execution order: index 0 is + the oldest ancestor, the last chunk is the immediate parent. Use the + ``get_*`` methods to slice the chain by app, instance, or workflow name. + """ + + def __init__( + self, + events: list[pb.HistoryEvent], + scope: PropagationScope, + chunks: list[_HistoryChunk], + ): + self._events = events + self._scope = scope + self._chunks = chunks + + @property + def events(self) -> list[pb.HistoryEvent]: + """All propagated history events, flattened in chunk order.""" + return self._events + + @property + def scope(self) -> PropagationScope: + """The propagation scope used to produce this history.""" + return self._scope + + def get_app_ids(self) -> list[str]: + """Ordered, deduplicated list of app IDs in the history chain.""" + seen: set[str] = set() + result: list[str] = [] + for c in self._chunks: + if c.app_id not in seen: + seen.add(c.app_id) + result.append(c.app_id) + return result + + def _chunk_events(self, chunk: _HistoryChunk) -> list[pb.HistoryEvent]: + return self._events[chunk.start_event_index : chunk.start_event_index + chunk.event_count] + + def get_events_by_app_id(self, app_id: str) -> list[pb.HistoryEvent]: + """Events produced by the given app, in execution order.""" + return [ + event + for chunk in self._chunks + if chunk.app_id == app_id + for event in self._chunk_events(chunk) + ] + + def get_events_by_instance_id(self, instance_id: str) -> list[pb.HistoryEvent]: + """Events produced by the given workflow instance, in execution order.""" + return [ + event + for chunk in self._chunks + if chunk.instance_id == instance_id + for event in self._chunk_events(chunk) + ] + + def get_events_by_workflow_name(self, workflow_name: str) -> list[pb.HistoryEvent]: + """Events produced by workflows with the given name, in execution order.""" + return [ + event + for chunk in self._chunks + if chunk.workflow_name 
== workflow_name + for event in self._chunk_events(chunk) + ] + + def _make_workflow_result(self, chunk: _HistoryChunk) -> WorkflowResult: + return WorkflowResult( + instance_id=chunk.instance_id, + app_id=chunk.app_id, + name=chunk.workflow_name, + _events=self._chunk_events(chunk), + ) + + def get_workflows(self) -> list[WorkflowResult]: + """All workflow results in the chain, in execution order + (ancestor first, immediate parent last).""" + return [self._make_workflow_result(c) for c in self._chunks] + + def get_workflows_by_name(self, name: str) -> list[WorkflowResult]: + """All workflows whose name matches, in execution order. Useful when + the chain contains the same name more than once (recursion / ContinueAsNew).""" + return [self._make_workflow_result(c) for c in self._chunks if c.workflow_name == name] + + def get_workflow_by_name(self, name: str) -> WorkflowResult: + """Most recent workflow in the chain whose name matches. + + Raises :class:`PropagationNotFoundError` if no match is found. + """ + all_results = self.get_workflows_by_name(name) + if not all_results: + raise PropagationNotFoundError(f'no workflow named {name!r} in propagated history') + return all_results[-1] + + @classmethod + def from_proto(cls, ph: Optional[pb.PropagatedHistory]) -> Optional[PropagatedHistory]: + """Build a PropagatedHistory from the wire-form proto. + + Each chunk's ``rawEvents`` are parsed once and the per-chunk events are + concatenated into a single ordered list. Structural validation runs + first: every chunk must carry a non-empty ``appId`` and every raw event + must parse as a ``HistoryEvent``. Returns ``None`` when ``ph`` itself + is ``None``. + + Raises: + ValueError: If the proto is structurally malformed. 
+ """ + if ph is None: + return None + + events: list[pb.HistoryEvent] = [] + chunks: list[_HistoryChunk] = [] + for i, c in enumerate(ph.chunks): + if not c.appId: + raise ValueError(f'propagated history: chunk {i} has empty appId') + start = len(events) + for j, raw in enumerate(c.rawEvents): + event = pb.HistoryEvent() + try: + event.ParseFromString(raw) + except Exception as ex: + raise ValueError( + f'propagated history: chunk {i} (app {c.appId!r}): ' + f'failed to decode rawEvent {j}: {ex}' + ) from ex + events.append(event) + chunks.append( + _HistoryChunk( + app_id=c.appId, + instance_id=c.instanceId, + workflow_name=c.workflowName, + start_event_index=start, + event_count=len(events) - start, + ) + ) + return cls(events=events, scope=PropagationScope(ph.scope), chunks=chunks) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_activity_context.py b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_activity_context.py index 02dee15c..90974fab 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_activity_context.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_activity_context.py @@ -15,9 +15,10 @@ from __future__ import annotations -from typing import Callable, TypeVar +from typing import Callable, Optional, TypeVar from dapr.ext.workflow._durabletask import task +from dapr.ext.workflow.propagation import PropagatedHistory T = TypeVar('T') TInput = TypeVar('TInput') @@ -43,6 +44,11 @@ def task_id(self) -> int: def get_inner_context(self) -> task.ActivityContext: return self.__obj + def get_propagated_history(self) -> Optional[PropagatedHistory]: + """Return history propagated from the calling workflow, or ``None`` if + the caller did not opt in to history propagation.""" + return self.__obj.get_propagated_history() + # Activities are simple functions that can be scheduled by workflows Activity = Callable[..., TOutput] diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py 
b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py index af31e84d..e3e98fe9 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py @@ -20,6 +20,7 @@ from typing import Any, Callable, Generator, Optional, TypeVar, Union from dapr.ext.workflow._durabletask import task +from dapr.ext.workflow.propagation import PropagatedHistory, PropagationScope from dapr.ext.workflow.workflow_activity_context import Activity T = TypeVar('T') @@ -112,6 +113,7 @@ def call_activity( *, input: Optional[TInput] = None, app_id: Optional[str] = None, + propagation: Optional[PropagationScope] = None, ) -> task.Task[TOutput]: """Schedule an activity for execution. @@ -123,6 +125,11 @@ def call_activity( The JSON-serializable input (or None) to pass to the activity. app_id: str | None The AppID that will execute the activity. + propagation: PropagationScope | None + Optional history propagation scope. ``OWN_HISTORY`` sends this + workflow's events to the activity; ``LINEAGE`` additionally + forwards history this workflow received from its parent. The + default (``None``) propagates nothing. Returns ------- @@ -139,6 +146,7 @@ def call_child_workflow( input: Optional[TInput] = None, instance_id: Optional[str] = None, app_id: Optional[str] = None, + propagation: Optional[PropagationScope] = None, ) -> task.Task[TOutput]: """Schedule child-workflow function for execution. @@ -153,6 +161,11 @@ def call_child_workflow( random UUID will be used. app_id: str The AppID that will execute the workflow. + propagation: PropagationScope | None + Optional history propagation scope. ``OWN_HISTORY`` sends this + workflow's events to the child; ``LINEAGE`` additionally forwards + history this workflow received from its parent. The default + (``None``) propagates nothing. 
Returns ------- @@ -161,6 +174,12 @@ def call_child_workflow( """ pass + @abstractmethod + def get_propagated_history(self) -> Optional[PropagatedHistory]: + """Return history propagated from a parent workflow, or ``None`` if + no history was propagated.""" + pass + @abstractmethod def wait_for_external_event( self, diff --git a/ext/dapr-ext-workflow/tests/durabletask/test_propagation.py b/ext/dapr-ext-workflow/tests/durabletask/test_propagation.py new file mode 100644 index 00000000..260ab1ab --- /dev/null +++ b/ext/dapr-ext-workflow/tests/durabletask/test_propagation.py @@ -0,0 +1,372 @@ +# Copyright 2026 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Unit tests for the PropagatedHistory query API. + +Ported from the Go reference implementation at +durabletask-go/api/propagation_test.go. +""" + +from __future__ import annotations + +import dapr.ext.workflow._durabletask.internal.protos as pb +import pytest +from dapr.ext.workflow import ( + PropagatedHistory, + PropagationNotFoundError, + PropagationScope, +) +from google.protobuf import wrappers_pb2 + +# --- Test history fixtures --------------------------------------------------- + +# The fixtures below mirror Go's makeTestHistory. 
Two chunks: +# appA / wf-001 "MerchantCheckout": +# [0] ExecutionStarted MerchantCheckout +# [1] TaskScheduled ValidateMerchant (eventId=1) +# [-1] TaskCompleted taskScheduledId=1 +# [2] ChildWorkflowInstanceCreated ProcessPayment (instance=wf-002) +# +# appB / wf-002 "ProcessPayment": +# [0] ExecutionStarted ProcessPayment +# [1] TaskScheduled ValidateCard (eventId=1, exec-2) — completed +# [-1] TaskCompleted taskScheduledId=1 +# [2] TaskScheduled ValidateCard (eventId=2, exec-3) — failed +# [-1] TaskFailed taskScheduledId=2 +# [3] ChildWorkflowInstanceCreated FraudDetection (instance=wf-003) + + +def _str_value(s: str) -> wrappers_pb2.StringValue: + return wrappers_pb2.StringValue(value=s) + + +def _execution_started(name: str) -> pb.HistoryEvent: + return pb.HistoryEvent( + eventId=0, + executionStarted=pb.ExecutionStartedEvent(name=name), + ) + + +def _task_scheduled(event_id: int, name: str, exec_id: str, raw_input: str) -> pb.HistoryEvent: + return pb.HistoryEvent( + eventId=event_id, + taskScheduled=pb.TaskScheduledEvent( + name=name, + taskExecutionId=exec_id, + input=_str_value(raw_input), + ), + ) + + +def _task_completed(task_scheduled_id: int, exec_id: str, result: str) -> pb.HistoryEvent: + return pb.HistoryEvent( + eventId=-1, + taskCompleted=pb.TaskCompletedEvent( + taskScheduledId=task_scheduled_id, + taskExecutionId=exec_id, + result=_str_value(result), + ), + ) + + +def _task_failed(task_scheduled_id: int, exec_id: str, message: str) -> pb.HistoryEvent: + return pb.HistoryEvent( + eventId=-1, + taskFailed=pb.TaskFailedEvent( + taskScheduledId=task_scheduled_id, + taskExecutionId=exec_id, + failureDetails=pb.TaskFailureDetails(errorMessage=message), + ), + ) + + +def _child_wf_created(event_id: int, name: str, instance_id: str) -> pb.HistoryEvent: + return pb.HistoryEvent( + eventId=event_id, + childWorkflowInstanceCreated=pb.ChildWorkflowInstanceCreatedEvent( + name=name, + instanceId=instance_id, + ), + ) + + +def _make_chunk( + app_id: str, + 
instance_id: str, + workflow_name: str, + events: list[pb.HistoryEvent], +) -> pb.PropagatedHistoryChunk: + return pb.PropagatedHistoryChunk( + appId=app_id, + instanceId=instance_id, + workflowName=workflow_name, + rawEvents=[e.SerializeToString() for e in events], + ) + + +def _make_proto_history() -> pb.PropagatedHistory: + chunk_a_events = [ + _execution_started('MerchantCheckout'), + _task_scheduled(1, 'ValidateMerchant', 'exec-1', '{"merchant":"abc"}'), + _task_completed(1, 'exec-1', 'true'), + _child_wf_created(2, 'ProcessPayment', 'wf-002'), + ] + chunk_b_events = [ + _execution_started('ProcessPayment'), + _task_scheduled(1, 'ValidateCard', 'exec-2', '{"card":"4242"}'), + _task_completed(1, 'exec-2', 'true'), + _task_scheduled(2, 'ValidateCard', 'exec-3', '{"card":"4242","retry":true}'), + _task_failed(2, 'exec-3', 'card declined'), + _child_wf_created(3, 'FraudDetection', 'wf-003'), + ] + return pb.PropagatedHistory( + scope=pb.HISTORY_PROPAGATION_SCOPE_LINEAGE, + chunks=[ + _make_chunk('appA', 'wf-001', 'MerchantCheckout', chunk_a_events), + _make_chunk('appB', 'wf-002', 'ProcessPayment', chunk_b_events), + ], + ) + + +@pytest.fixture +def history() -> PropagatedHistory: + parsed = PropagatedHistory.from_proto(_make_proto_history()) + assert parsed is not None + return parsed + + +# --- Top-level structural queries ------------------------------------------- + + +def test_scope_is_preserved(history: PropagatedHistory): + assert history.scope == PropagationScope.LINEAGE + + +def test_events_are_flattened_in_chunk_order(history: PropagatedHistory): + assert len(history.events) == 10 + assert history.events[0].executionStarted.name == 'MerchantCheckout' + assert history.events[4].executionStarted.name == 'ProcessPayment' + + +def test_get_app_ids_returns_unique_ordered(history: PropagatedHistory): + assert history.get_app_ids() == ['appA', 'appB'] + + +def test_get_events_by_app_id(history: PropagatedHistory): + appa_events = 
history.get_events_by_app_id('appA') + appb_events = history.get_events_by_app_id('appB') + assert len(appa_events) == 4 + assert len(appb_events) == 6 + assert history.get_events_by_app_id('missing') == [] + + +def test_get_events_by_instance_id(history: PropagatedHistory): + wf001_events = history.get_events_by_instance_id('wf-001') + assert len(wf001_events) == 4 + assert wf001_events[0].executionStarted.name == 'MerchantCheckout' + + +def test_get_events_by_workflow_name(history: PropagatedHistory): + pp_events = history.get_events_by_workflow_name('ProcessPayment') + assert len(pp_events) == 6 + assert pp_events[0].executionStarted.name == 'ProcessPayment' + + +# --- Workflow-level queries -------------------------------------------------- + + +def test_get_workflows_returns_chunks_in_order(history: PropagatedHistory): + workflows = history.get_workflows() + assert len(workflows) == 2 + + assert workflows[0].name == 'MerchantCheckout' + assert workflows[0].app_id == 'appA' + assert workflows[0].instance_id == 'wf-001' + + assert workflows[1].name == 'ProcessPayment' + assert workflows[1].app_id == 'appB' + assert workflows[1].instance_id == 'wf-002' + + +def test_get_workflow_by_name_returns_match(history: PropagatedHistory): + wf = history.get_workflow_by_name('ProcessPayment') + assert wf.name == 'ProcessPayment' + assert wf.instance_id == 'wf-002' + + +def test_get_workflow_by_name_raises_when_missing(history: PropagatedHistory): + with pytest.raises(PropagationNotFoundError): + history.get_workflow_by_name('NotARealWorkflow') + + +def test_get_workflows_by_name_returns_all_matches(): + """If the same workflow name appears in multiple chunks (e.g. 
ContinueAsNew + or recursion), get_workflows_by_name returns every occurrence and + get_workflow_by_name returns the last.""" + + chunk_events = [_execution_started('Loop')] + proto = pb.PropagatedHistory( + scope=pb.HISTORY_PROPAGATION_SCOPE_LINEAGE, + chunks=[ + _make_chunk('appA', 'wf-1', 'Loop', chunk_events), + _make_chunk('appA', 'wf-2', 'Loop', chunk_events), + ], + ) + ph = PropagatedHistory.from_proto(proto) + assert ph is not None + + all_loops = ph.get_workflows_by_name('Loop') + assert len(all_loops) == 2 + assert ph.get_workflow_by_name('Loop').instance_id == 'wf-2' + + +# --- Activity resolution ---------------------------------------------------- + + +def test_get_activity_by_name_returns_completed_result(history: PropagatedHistory): + merchant = history.get_workflow_by_name('MerchantCheckout') + activity = merchant.get_activity_by_name('ValidateMerchant') + + assert activity.name == 'ValidateMerchant' + assert activity.started + assert activity.completed + assert not activity.failed + assert activity.input == '{"merchant":"abc"}' + assert activity.output == 'true' + assert activity.error is None + + +def test_get_activities_by_name_returns_all_invocations(history: PropagatedHistory): + payment = history.get_workflow_by_name('ProcessPayment') + cards = payment.get_activities_by_name('ValidateCard') + + assert len(cards) == 2 + assert cards[0].completed and not cards[0].failed + assert cards[0].output == 'true' + + assert cards[1].failed and not cards[1].completed + assert cards[1].error is not None + assert cards[1].error.errorMessage == 'card declined' + + +def test_get_activity_by_name_returns_last_invocation(history: PropagatedHistory): + """get_activity_by_name returns the most recent invocation in execution + order, matching Go semantics.""" + payment = history.get_workflow_by_name('ProcessPayment') + last = payment.get_activity_by_name('ValidateCard') + assert last.failed + assert last.error is not None + assert last.error.errorMessage == 'card 
declined' + + +def test_get_activity_by_name_raises_when_missing(history: PropagatedHistory): + payment = history.get_workflow_by_name('ProcessPayment') + with pytest.raises(PropagationNotFoundError): + payment.get_activity_by_name('NotAnActivity') + + +def test_activity_not_yet_completed_reports_started_only(): + """A TaskScheduled with no matching TaskCompleted/TaskFailed is reported as + started but neither completed nor failed.""" + events = [ + _execution_started('StillRunning'), + _task_scheduled(1, 'Pending', 'exec-1', 'in'), + ] + proto = pb.PropagatedHistory( + scope=pb.HISTORY_PROPAGATION_SCOPE_OWN_HISTORY, + chunks=[_make_chunk('appA', 'wf-1', 'StillRunning', events)], + ) + ph = PropagatedHistory.from_proto(proto) + assert ph is not None + pending = ph.get_workflow_by_name('StillRunning').get_activity_by_name('Pending') + + assert pending.started + assert not pending.completed + assert not pending.failed + assert pending.input == 'in' + assert pending.output is None + + +# --- Child workflow resolution ---------------------------------------------- + + +def test_get_child_workflow_by_name(history: PropagatedHistory): + merchant = history.get_workflow_by_name('MerchantCheckout') + child = merchant.get_child_workflow_by_name('ProcessPayment') + + assert child.name == 'ProcessPayment' + assert child.started + + +def test_get_child_workflow_by_name_raises_when_missing(history: PropagatedHistory): + merchant = history.get_workflow_by_name('MerchantCheckout') + with pytest.raises(PropagationNotFoundError): + merchant.get_child_workflow_by_name('NotAChild') + + +# --- from_proto / structural validation ------------------------------------- + + +def test_from_proto_returns_none_for_none_input(): + assert PropagatedHistory.from_proto(None) is None + + +def test_from_proto_rejects_chunk_with_empty_app_id(): + bad_proto = pb.PropagatedHistory( + scope=pb.HISTORY_PROPAGATION_SCOPE_LINEAGE, + chunks=[ + pb.PropagatedHistoryChunk( + appId='', + instanceId='wf-1', + 
workflowName='X', + rawEvents=[], + ), + ], + ) + with pytest.raises(ValueError, match='empty appId'): + PropagatedHistory.from_proto(bad_proto) + + +def test_from_proto_rejects_malformed_raw_event(): + bad_proto = pb.PropagatedHistory( + scope=pb.HISTORY_PROPAGATION_SCOPE_LINEAGE, + chunks=[ + pb.PropagatedHistoryChunk( + appId='appA', + instanceId='wf-1', + workflowName='X', + rawEvents=[b'\xff\xff\xff\xff\xff\xff\xff\xff garbage'], + ), + ], + ) + with pytest.raises(ValueError, match='rawEvent'): + PropagatedHistory.from_proto(bad_proto) + + +def test_from_proto_round_trip_preserves_events(): + proto = _make_proto_history() + ph = PropagatedHistory.from_proto(proto) + assert ph is not None + assert len(ph.events) == 10 + assert ph.scope == PropagationScope.LINEAGE + assert ph.get_app_ids() == ['appA', 'appB'] + + +def test_from_proto_handles_empty_chunks(): + proto = pb.PropagatedHistory( + scope=pb.HISTORY_PROPAGATION_SCOPE_NONE, + chunks=[], + ) + ph = PropagatedHistory.from_proto(proto) + assert ph is not None + assert ph.events == [] + assert ph.get_app_ids() == [] + assert ph.get_workflows() == [] diff --git a/ext/dapr-ext-workflow/tests/durabletask/test_propagation_wiring.py b/ext/dapr-ext-workflow/tests/durabletask/test_propagation_wiring.py new file mode 100644 index 00000000..a9418247 --- /dev/null +++ b/ext/dapr-ext-workflow/tests/durabletask/test_propagation_wiring.py @@ -0,0 +1,232 @@ +# Copyright 2026 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tests for runtime-side history propagation wiring: + +- :meth:`OrchestrationContext.call_activity(propagation=...)` and + :meth:`call_sub_orchestrator(propagation=...)` set ``historyPropagationScope`` + on the emitted action. +- :meth:`OrchestrationContext.get_propagated_history` returns the history that + the worker parsed off the incoming ``WorkflowRequest``. +- :meth:`ActivityContext.get_propagated_history` returns the history that the + worker parsed off the incoming ``ActivityRequest``. +""" + +from __future__ import annotations + +import json +import logging + +import dapr.ext.workflow._durabletask.internal.helpers as helpers +import dapr.ext.workflow._durabletask.internal.protos as pb +from dapr.ext.workflow import PropagatedHistory, PropagationScope +from dapr.ext.workflow._durabletask import task, worker + +TEST_LOGGER = logging.getLogger('tests') +TEST_INSTANCE_ID = 'wiring-instance' + + +# --- Helpers ----------------------------------------------------------------- + + +def _no_op_activity(_ctx, _inp): + return None + + +def _single_chunk_history(workflow_name: str = 'Parent') -> pb.PropagatedHistory: + """Build a tiny but valid PropagatedHistory proto.""" + start = pb.HistoryEvent( + eventId=0, executionStarted=pb.ExecutionStartedEvent(name=workflow_name) + ) + return pb.PropagatedHistory( + scope=pb.HISTORY_PROPAGATION_SCOPE_OWN_HISTORY, + chunks=[ + pb.PropagatedHistoryChunk( + appId='parent-app', + instanceId='parent-instance', + workflowName=workflow_name, + rawEvents=[start.SerializeToString()], + ), + ], + ) + + +# --- Outgoing: actions carry historyPropagationScope ------------------------ + + +def test_call_activity_emits_no_propagation_by_default(): + def orchestrator(ctx: task.OrchestrationContext, _): + yield ctx.call_activity(_no_op_activity, input=1) + + registry = worker._Registry() + name = 
registry.add_orchestrator(orchestrator) + executor = worker._OrchestrationExecutor(registry, TEST_LOGGER) + result = executor.execute( + TEST_INSTANCE_ID, + [], + [ + helpers.new_workflow_started_event(), + helpers.new_execution_started_event(name, TEST_INSTANCE_ID, encoded_input=None), + ], + ) + + assert len(result.actions) == 1 + schedule = result.actions[0].scheduleTask + assert not schedule.HasField('historyPropagationScope') + + +def test_call_activity_emits_own_history_when_requested(): + def orchestrator(ctx: task.OrchestrationContext, _): + yield ctx.call_activity(_no_op_activity, input=1, propagation=PropagationScope.OWN_HISTORY) + + registry = worker._Registry() + name = registry.add_orchestrator(orchestrator) + executor = worker._OrchestrationExecutor(registry, TEST_LOGGER) + result = executor.execute( + TEST_INSTANCE_ID, + [], + [ + helpers.new_workflow_started_event(), + helpers.new_execution_started_event(name, TEST_INSTANCE_ID, encoded_input=None), + ], + ) + + schedule = result.actions[0].scheduleTask + assert schedule.HasField('historyPropagationScope') + assert schedule.historyPropagationScope == pb.HISTORY_PROPAGATION_SCOPE_OWN_HISTORY + + +def test_call_sub_orchestrator_emits_lineage_when_requested(): + def orchestrator(ctx: task.OrchestrationContext, _): + yield ctx.call_sub_orchestrator('ChildWf', input=None, propagation=PropagationScope.LINEAGE) + + registry = worker._Registry() + name = registry.add_orchestrator(orchestrator) + executor = worker._OrchestrationExecutor(registry, TEST_LOGGER) + result = executor.execute( + TEST_INSTANCE_ID, + [], + [ + helpers.new_workflow_started_event(), + helpers.new_execution_started_event(name, TEST_INSTANCE_ID, encoded_input=None), + ], + ) + + create = result.actions[0].createChildWorkflow + assert create.HasField('historyPropagationScope') + assert create.historyPropagationScope == pb.HISTORY_PROPAGATION_SCOPE_LINEAGE + + +# --- Incoming: ctx.get_propagated_history is populated ---------------------- 
+ + +def test_orchestration_executor_exposes_propagated_history(): + """Build an executor, run an orchestrator that reads + ctx.get_propagated_history, and verify the propagated chunk reached it.""" + + captured: dict[str, PropagatedHistory | None] = {'history': None} + + def orchestrator(ctx: task.OrchestrationContext, _): + captured['history'] = ctx.get_propagated_history() + return 'done' + + registry = worker._Registry() + name = registry.add_orchestrator(orchestrator) + executor = worker._OrchestrationExecutor(registry, TEST_LOGGER) + + propagated = PropagatedHistory.from_proto(_single_chunk_history()) + assert propagated is not None + + executor.execute( + TEST_INSTANCE_ID, + [], + [ + helpers.new_workflow_started_event(), + helpers.new_execution_started_event(name, TEST_INSTANCE_ID, encoded_input=None), + ], + propagated_history=propagated, + ) + + history = captured['history'] + assert history is not None + assert history.get_app_ids() == ['parent-app'] + assert history.get_workflow_by_name('Parent').instance_id == 'parent-instance' + + +def test_orchestration_executor_propagated_history_is_none_by_default(): + captured: dict[str, PropagatedHistory | None] = {'history': 'sentinel'} # type: ignore[dict-item] + + def orchestrator(ctx: task.OrchestrationContext, _): + captured['history'] = ctx.get_propagated_history() + return 'done' + + registry = worker._Registry() + name = registry.add_orchestrator(orchestrator) + executor = worker._OrchestrationExecutor(registry, TEST_LOGGER) + executor.execute( + TEST_INSTANCE_ID, + [], + [ + helpers.new_workflow_started_event(), + helpers.new_execution_started_event(name, TEST_INSTANCE_ID, encoded_input=None), + ], + ) + assert captured['history'] is None + + +def test_activity_executor_exposes_propagated_history(): + captured: dict[str, PropagatedHistory | None] = {'history': None} + + def reading_activity(ctx: task.ActivityContext, _): + captured['history'] = ctx.get_propagated_history() + return 'ok' + + registry = 
worker._Registry() + activity_name = registry.add_activity(reading_activity) + executor = worker._ActivityExecutor(registry, TEST_LOGGER) + + propagated = PropagatedHistory.from_proto(_single_chunk_history('Caller')) + assert propagated is not None + + encoded_output = executor.execute( + orchestration_id='wf-1', + name=activity_name, + task_id=1, + encoded_input=json.dumps(None), + task_execution_id='exec-1', + propagated_history=propagated, + ) + assert encoded_output == '"ok"' + + history = captured['history'] + assert history is not None + assert history.get_app_ids() == ['parent-app'] + assert history.get_workflow_by_name('Caller').instance_id == 'parent-instance' + + +def test_activity_executor_propagated_history_is_none_by_default(): + captured: dict[str, PropagatedHistory | None] = {'history': 'sentinel'} # type: ignore[dict-item] + + def reading_activity(ctx: task.ActivityContext, _): + captured['history'] = ctx.get_propagated_history() + return None + + registry = worker._Registry() + activity_name = registry.add_activity(reading_activity) + executor = worker._ActivityExecutor(registry, TEST_LOGGER) + executor.execute( + orchestration_id='wf-1', + name=activity_name, + task_id=1, + encoded_input=json.dumps(None), + task_execution_id='exec-1', + ) + assert captured['history'] is None diff --git a/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py b/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py index 50517a30..22350dda 100644 --- a/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py +++ b/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py @@ -17,6 +17,8 @@ from datetime import datetime from unittest import mock +import dapr.ext.workflow._durabletask.internal.protos as pb +from dapr.ext.workflow import PropagatedHistory from dapr.ext.workflow._durabletask import worker from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext @@ 
-33,19 +35,25 @@ class FakeOrchestrationContext: def __init__(self): self.instance_id = mock_instance_id self.custom_status = None + self._propagated_history = None def create_timer(self, fire_at): return mock_create_timer - def call_activity(self, activity, input, app_id): + def call_activity(self, activity, input, app_id, retry_policy=None, propagation=None): return mock_call_activity - def call_sub_orchestrator(self, orchestrator, input, instance_id, app_id): + def call_sub_orchestrator( + self, orchestrator, input, instance_id, app_id, retry_policy=None, propagation=None + ): return mock_call_sub_orchestrator def set_custom_status(self, custom_status): self.custom_status = custom_status + def get_propagated_history(self): + return self._propagated_history + class DaprWorkflowContextTest(unittest.TestCase): def mock_client_activity(ctx: WorkflowActivityContext, input): @@ -74,3 +82,41 @@ def test_workflow_context_functions(self): dapr_wf_ctx.set_custom_status(mock_custom_status) assert fakeContext.custom_status == mock_custom_status + + def test_get_propagated_history_proxies_inner_context(self): + with mock.patch( + 'dapr.ext.workflow._durabletask.worker._RuntimeOrchestrationContext', + return_value=FakeOrchestrationContext(), + ): + fake = worker._RuntimeOrchestrationContext(mock_instance_id) + history_proto = pb.PropagatedHistory( + scope=pb.HISTORY_PROPAGATION_SCOPE_OWN_HISTORY, + chunks=[ + pb.PropagatedHistoryChunk( + appId='upstream', + instanceId='upstream-1', + workflowName='Caller', + rawEvents=[ + pb.HistoryEvent( + eventId=0, + executionStarted=pb.ExecutionStartedEvent(name='Caller'), + ).SerializeToString(), + ], + ), + ], + ) + fake._propagated_history = PropagatedHistory.from_proto(history_proto) + dapr_wf_ctx = DaprWorkflowContext(fake) + + history = dapr_wf_ctx.get_propagated_history() + assert history is not None + assert history.get_app_ids() == ['upstream'] + + def test_get_propagated_history_returns_none_when_not_set(self): + with 
mock.patch( + 'dapr.ext.workflow._durabletask.worker._RuntimeOrchestrationContext', + return_value=FakeOrchestrationContext(), + ): + fake = worker._RuntimeOrchestrationContext(mock_instance_id) + dapr_wf_ctx = DaprWorkflowContext(fake) + assert dapr_wf_ctx.get_propagated_history() is None diff --git a/ext/dapr-ext-workflow/tests/test_workflow_activity_context.py b/ext/dapr-ext-workflow/tests/test_workflow_activity_context.py index 679738fe..43a99748 100644 --- a/ext/dapr-ext-workflow/tests/test_workflow_activity_context.py +++ b/ext/dapr-ext-workflow/tests/test_workflow_activity_context.py @@ -16,6 +16,8 @@ import unittest from unittest import mock +import dapr.ext.workflow._durabletask.internal.protos as pb +from dapr.ext.workflow import PropagatedHistory from dapr.ext.workflow._durabletask import task from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext @@ -24,6 +26,9 @@ class FakeActivityContext: + def __init__(self, propagated_history=None): + self._propagated_history = propagated_history + @property def orchestration_id(self): return mock_orchestration_id @@ -32,6 +37,9 @@ def orchestration_id(self): def task_id(self): return mock_task + def get_propagated_history(self): + return self._propagated_history + class WorkflowActivityContextTest(unittest.TestCase): def test_workflow_activity_context(self): @@ -48,3 +56,33 @@ def test_workflow_activity_context(self): actual_task_id = act_ctx.task_id assert actual_task_id == mock_task + + def test_workflow_activity_context_get_propagated_history(self): + history_proto = pb.PropagatedHistory( + scope=pb.HISTORY_PROPAGATION_SCOPE_OWN_HISTORY, + chunks=[ + pb.PropagatedHistoryChunk( + appId='caller-app', + instanceId='caller-instance', + workflowName='Caller', + rawEvents=[ + pb.HistoryEvent( + eventId=0, + executionStarted=pb.ExecutionStartedEvent(name='Caller'), + ).SerializeToString(), + ], + ), + ], + ) + propagated = PropagatedHistory.from_proto(history_proto) + fake = 
FakeActivityContext(propagated_history=propagated) + act_ctx = WorkflowActivityContext(fake) + + history = act_ctx.get_propagated_history() + assert history is not None + assert history.get_app_ids() == ['caller-app'] + + def test_workflow_activity_context_get_propagated_history_default_none(self): + fake = FakeActivityContext() + act_ctx = WorkflowActivityContext(fake) + assert act_ctx.get_propagated_history() is None diff --git a/tests/examples/test_workflow.py b/tests/examples/test_workflow.py index bf641c6d..45aa0e85 100644 --- a/tests/examples/test_workflow.py +++ b/tests/examples/test_workflow.py @@ -44,3 +44,22 @@ def test_simple_workflow(dapr): output = dapr.run('--app-id workflow-simple -- python3 simple.py', timeout=60) for line in EXPECTED_SIMPLE: assert line in output, f'Missing in output: {line}' + + +EXPECTED_HISTORY_PROPAGATION = [ + '*** validating merchant merchant-42', + "*** process_payment received parent context for merchant 'merchant-42'", + '*** log_summary saw parent on app', + 'validate_merchant -> completed=True output={"merchant_id": "merchant-42", "valid": true}', + '*** workflow completed: status=COMPLETED', +] + + +@pytest.mark.example_dir('workflow') +def test_history_propagation(dapr): + output = dapr.run( + '--app-id workflow-history-propagation -- python3 history_propagation.py', + timeout=60, + ) + for line in EXPECTED_HISTORY_PROPAGATION: + assert line in output, f'Missing in output: {line}'