├── .github ├── CODEOWNERS └── workflows │ └── ci.yml ├── .gitignore ├── LICENSE ├── README.md ├── activity_worker ├── README.md ├── __init__.py ├── activity_worker.py └── go_workflow │ ├── go.mod │ ├── go.sum │ └── main.go ├── bedrock ├── README.md ├── __init__.py ├── basic │ ├── README.md │ ├── __init__.py │ ├── run_worker.py │ ├── send_message.py │ └── workflows.py ├── entity │ ├── README.md │ ├── __init__.py │ ├── end_chat.py │ ├── get_history.py │ ├── run_worker.py │ ├── send_message.py │ └── workflows.py ├── shared │ ├── __init__.py │ └── activities.py └── signals_and_queries │ ├── README.md │ ├── __init__.py │ ├── get_history.py │ ├── run_worker.py │ ├── send_message.py │ └── workflows.py ├── cloud_export_to_parquet ├── README.md ├── __init__.py ├── create_schedule.py ├── data_trans_activities.py ├── run_worker.py └── workflows.py ├── context_propagation ├── README.md ├── __init__.py ├── activities.py ├── interceptor.py ├── shared.py ├── starter.py ├── worker.py └── workflows.py ├── custom_converter ├── README.md ├── __init__.py ├── shared.py ├── starter.py ├── worker.py └── workflow.py ├── custom_decorator ├── README.md ├── __init__.py ├── activity_utils.py ├── starter.py └── worker.py ├── custom_metric ├── README.md ├── __init__.py ├── activity.py ├── starter.py ├── worker.py └── workflow.py ├── dsl ├── README.md ├── __init__.py ├── activities.py ├── starter.py ├── worker.py ├── workflow.py ├── workflow1.yaml └── workflow2.yaml ├── encryption ├── README.md ├── __init__.py ├── codec.py ├── codec_server.py ├── starter.py └── worker.py ├── gevent_async ├── README.md ├── __init__.py ├── activity.py ├── executor.py ├── starter.py ├── test │ ├── __init__.py │ └── run_combined.py ├── worker.py └── workflow.py ├── hello ├── README.md ├── __init__.py ├── hello_activity.py ├── hello_activity_choice.py ├── hello_activity_method.py ├── hello_activity_multiprocess.py ├── hello_activity_retry.py ├── hello_activity_threaded.py ├── hello_async_activity_completion.py ├── 
hello_cancellation.py ├── hello_child_workflow.py ├── hello_continue_as_new.py ├── hello_cron.py ├── hello_exception.py ├── hello_local_activity.py ├── hello_mtls.py ├── hello_parallel_activity.py ├── hello_patch.py ├── hello_query.py ├── hello_search_attributes.py ├── hello_signal.py └── hello_update.py ├── langchain ├── README.md ├── activities.py ├── langchain_interceptor.py ├── starter.py ├── worker.py └── workflow.py ├── message_passing ├── __init__.py ├── introduction │ ├── README.md │ ├── __init__.py │ ├── activities.py │ ├── starter.py │ ├── worker.py │ └── workflows.py ├── safe_message_handlers │ ├── README.md │ ├── __init__.py │ ├── activities.py │ ├── starter.py │ ├── worker.py │ └── workflow.py ├── update_with_start │ └── lazy_initialization │ │ ├── README.md │ │ ├── __init__.py │ │ ├── activities.py │ │ ├── starter.py │ │ ├── worker.py │ │ └── workflows.py ├── waiting_for_handlers │ ├── README.md │ ├── __init__.py │ ├── activities.py │ ├── starter.py │ ├── worker.py │ └── workflows.py └── waiting_for_handlers_and_compensation │ ├── README.md │ ├── __init__.py │ ├── activities.py │ ├── starter.py │ ├── worker.py │ └── workflows.py ├── open_telemetry ├── README.md ├── __init__.py ├── aspire-metrics-screenshot.png ├── aspire-traces-screenshot.png ├── docker-compose.yaml ├── starter.py └── worker.py ├── patching ├── README.md ├── __init__.py ├── activities.py ├── starter.py ├── worker.py ├── workflow_1_initial.py ├── workflow_2_patched.py ├── workflow_3_patch_deprecated.py └── workflow_4_patch_complete.py ├── polling ├── README.md ├── __init__.py ├── frequent │ ├── README.md │ ├── __init__.py │ ├── activities.py │ ├── run_frequent.py │ ├── run_worker.py │ └── workflows.py ├── infrequent │ ├── README.md │ ├── __init__.py │ ├── activities.py │ ├── run_infrequent.py │ ├── run_worker.py │ └── workflows.py ├── periodic_sequence │ ├── README.md │ ├── __init__.py │ ├── activities.py │ ├── run_periodic.py │ ├── run_worker.py │ └── workflows.py └── test_service.py 
├── prometheus ├── README.md ├── __init__.py ├── starter.py └── worker.py ├── pydantic_converter ├── README.md ├── __init__.py ├── starter.py └── worker.py ├── pydantic_converter_v1 ├── README.md ├── __init__.py ├── converter.py ├── starter.py └── worker.py ├── pyproject.toml ├── replay ├── README.md ├── __init__.py ├── replayer.py ├── starter.py └── worker.py ├── resource_pool ├── README.md ├── __init__.py ├── pool_client │ ├── __init__.py │ ├── resource_pool_client.py │ └── resource_pool_workflow.py ├── resource_user_workflow.py ├── shared.py ├── starter.py └── worker.py ├── schedules ├── README.md ├── __init__.py ├── backfill_schedule.py ├── delete_schedule.py ├── describe_schedule.py ├── list_schedule.py ├── pause_schedule.py ├── run_worker.py ├── start_schedule.py ├── trigger_schedule.py ├── update_schedule.py ├── your_activities.py ├── your_dataobject.py └── your_workflows.py ├── sentry ├── README.md ├── __init__.py ├── interceptor.py ├── starter.py └── worker.py ├── sleep_for_days ├── README.md ├── __init__.py ├── activities.py ├── starter.py ├── worker.py └── workflows.py ├── tests ├── __init__.py ├── activity_sticky_queues │ ├── __init__.py │ ├── activity_sticky_queues_activity_test.py │ └── activity_sticky_worker_workflow_test.py ├── conftest.py ├── context_propagation │ ├── __init__.py │ └── workflow_test.py ├── custom_converter │ ├── __init__.py │ └── workflow_test.py ├── custom_metric │ ├── __init__.py │ └── workflow_test.py ├── hello │ ├── __init__.py │ ├── hello_activity_choice_test.py │ ├── hello_activity_test.py │ ├── hello_cancellation_test.py │ ├── hello_child_test.py │ ├── hello_query_test.py │ ├── hello_signal_test.py │ └── hello_update_test.py ├── message_passing │ ├── introduction │ │ └── test_introduction_sample.py │ ├── lazy_initialization │ │ └── test_lazy_initialization.py │ ├── safe_message_handlers │ │ └── workflow_test.py │ ├── waiting_for_handlers │ │ └── waiting_for_handlers_test.py │ └── waiting_for_handlers_and_compensation │ │ └── 
waiting_for_handlers_and_compensation_test.py ├── polling │ └── infrequent │ │ ├── __init__.py │ │ └── workflow_test.py ├── pydantic_converter │ ├── __init__.py │ └── workflow_test.py ├── pydantic_converter_v1 │ ├── __init__.py │ └── workflow_test.py ├── resource_pool │ ├── __init__.py │ └── workflow_test.py ├── sleep_for_days │ ├── __init__.py │ └── workflow_test.py ├── trio_async │ ├── __init__.py │ └── workflow_test.py └── updatable_timer │ ├── __init__.py │ └── updatable_timer_test.py ├── trio_async ├── README.md ├── __init__.py ├── activities.py ├── starter.py ├── worker.py └── workflows.py ├── updatable_timer ├── README.md ├── __init__.py ├── starter.py ├── updatable_timer_lib.py ├── wake_up_time_updater.py ├── worker.py └── workflow.py ├── uv.lock ├── worker_specific_task_queues ├── README.md ├── __init__.py ├── demo_fs │ └── .gitignore ├── starter.py ├── static │ └── all-activitites-on-same-task-queue.png ├── tasks.py └── worker.py └── worker_versioning ├── README.md ├── __init__.py ├── activities.py ├── example.py ├── workflow_v1.py ├── workflow_v1_1.py └── workflow_v2.py /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @temporalio/sdk 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .venv 2 | .idea 3 | __pycache__ 4 | .vscode 5 | .DS_Store 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License 2 | 3 | Copyright (c) 2022 Temporal Technologies Inc. All rights reserved. 
4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. -------------------------------------------------------------------------------- /activity_worker/README.md: -------------------------------------------------------------------------------- 1 | # Activity Worker 2 | 3 | This sample shows a Go workflow calling a Python activity. 4 | 5 | First run the Go workflow worker by running this in the `go_workflow` directory in a separate terminal: 6 | 7 | go run . 8 | 9 | Then in another terminal, run the sample from this directory: 10 | 11 | uv run activity_worker.py 12 | 13 | The Python code will invoke the Go workflow which will execute the Python activity and return. 
-------------------------------------------------------------------------------- /activity_worker/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/activity_worker/__init__.py -------------------------------------------------------------------------------- /activity_worker/activity_worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import random 3 | import string 4 | 5 | from temporalio import activity 6 | from temporalio.client import Client 7 | from temporalio.worker import Worker 8 | 9 | task_queue = "say-hello-task-queue" 10 | workflow_name = "say-hello-workflow" 11 | activity_name = "say-hello-activity" 12 | 13 | 14 | @activity.defn(name=activity_name) 15 | async def say_hello_activity(name: str) -> str: 16 | return f"Hello, {name}!" 17 | 18 | 19 | async def main(): 20 | # Create client to localhost on default namespace 21 | client = await Client.connect("localhost:7233") 22 | 23 | # Run activity worker 24 | async with Worker(client, task_queue=task_queue, activities=[say_hello_activity]): 25 | # Run the Go workflow 26 | workflow_id = "".join( 27 | random.choices(string.ascii_uppercase + string.digits, k=30) 28 | ) 29 | result = await client.execute_workflow( 30 | workflow_name, "Temporal", id=workflow_id, task_queue=task_queue 31 | ) 32 | # Print out "Hello, Temporal!" 
33 | print(result) 34 | 35 | 36 | if __name__ == "__main__": 37 | asyncio.run(main()) 38 | -------------------------------------------------------------------------------- /activity_worker/go_workflow/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/temporalio/samples-python/activity_worker/go_workflow 2 | 3 | go 1.17 4 | 5 | require go.temporal.io/sdk v1.14.0 6 | 7 | require ( 8 | github.com/davecgh/go-spew v1.1.1 // indirect 9 | github.com/facebookgo/clock v0.0.0-20150410010913-600d898af40a // indirect 10 | github.com/gogo/googleapis v1.4.1 // indirect 11 | github.com/gogo/protobuf v1.3.2 // indirect 12 | github.com/gogo/status v1.1.0 // indirect 13 | github.com/golang/mock v1.6.0 // indirect 14 | github.com/golang/protobuf v1.5.2 // indirect 15 | github.com/google/uuid v1.3.0 // indirect 16 | github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 // indirect 17 | github.com/pborman/uuid v1.2.1 // indirect 18 | github.com/pmezard/go-difflib v1.0.0 // indirect 19 | github.com/robfig/cron v1.2.0 // indirect 20 | github.com/stretchr/objx v0.3.0 // indirect 21 | github.com/stretchr/testify v1.7.0 // indirect 22 | go.temporal.io/api v1.7.1-0.20220223032354-6e6fe738916a // indirect 23 | go.uber.org/atomic v1.9.0 // indirect 24 | golang.org/x/net v0.7.0 // indirect 25 | golang.org/x/sys v0.5.0 // indirect 26 | golang.org/x/text v0.7.0 // indirect 27 | golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac // indirect 28 | google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf // indirect 29 | google.golang.org/grpc v1.44.0 // indirect 30 | google.golang.org/protobuf v1.27.1 // indirect 31 | gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect 32 | ) 33 | -------------------------------------------------------------------------------- /activity_worker/go_workflow/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "log" 5 
| "time" 6 | 7 | "go.temporal.io/sdk/client" 8 | "go.temporal.io/sdk/temporal" 9 | "go.temporal.io/sdk/worker" 10 | "go.temporal.io/sdk/workflow" 11 | ) 12 | 13 | const ( 14 | taskQueue = "say-hello-task-queue" 15 | workflowName = "say-hello-workflow" 16 | activityName = "say-hello-activity" 17 | ) 18 | 19 | // SayHelloWorkflow simply returns the result of the say-hello activity. 20 | func SayHelloWorkflow(ctx workflow.Context, name string) (string, error) { 21 | ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ 22 | // Give it only 5 seconds to schedule and run with no retries 23 | ScheduleToCloseTimeout: 5 * time.Second, 24 | RetryPolicy: &temporal.RetryPolicy{MaximumAttempts: 1}, 25 | }) 26 | var response string 27 | err := workflow.ExecuteActivity(ctx, activityName, name).Get(ctx, &response) 28 | return response, err 29 | } 30 | 31 | func main() { 32 | // Create client to localhost on default namespace 33 | c, err := client.NewClient(client.Options{}) 34 | if err != nil { 35 | log.Fatalf("Failed creating client: %v", err) 36 | } 37 | defer c.Close() 38 | 39 | // Run workflow-only worker that does not handle activities 40 | w := worker.New(c, taskQueue, worker.Options{LocalActivityWorkerOnly: true}) 41 | w.RegisterWorkflowWithOptions(SayHelloWorkflow, workflow.RegisterOptions{Name: workflowName}) 42 | log.Printf("Starting worker (ctrl+c to exit)") 43 | if err := w.Run(worker.InterruptCh()); err != nil { 44 | log.Fatalf("Worker failed to start: %v", err) 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /bedrock/README.md: -------------------------------------------------------------------------------- 1 | # AI Chatbot example using Amazon Bedrock 2 | 3 | Demonstrates how Temporal and Amazon Bedrock can be used to quickly build bulletproof AI applications. 4 | 5 | ## Samples 6 | 7 | * [basic](basic) - A basic Bedrock workflow to process a single prompt. 
8 | * [signals_and_queries](signals_and_queries) - Extension to the basic workflow to allow multiple prompts through signals & queries. 9 | * [entity](entity) - Full multi-Turn chat using an entity workflow.. 10 | 11 | ## Pre-requisites 12 | 13 | 1. An AWS account with Bedrock enabled. 14 | 2. A machine that has access to Bedrock. 15 | 3. A local Temporal server running on the same machine. See [Temporal's dev server docs](https://docs.temporal.io/cli#start-dev-server) for more information. 16 | 17 | These examples use Amazon's Python SDK (Boto3). To configure Boto3 to use your AWS credentials, follow the instructions in [the Boto3 documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html). 18 | 19 | ## Running the samples 20 | 21 | For these sample, the optional `bedrock` dependency group must be included. To include, run: 22 | 23 | uv sync --group bedrock 24 | 25 | There are 3 Bedrock samples, see the README.md in each sub-directory for instructions on running each. -------------------------------------------------------------------------------- /bedrock/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/bedrock/__init__.py -------------------------------------------------------------------------------- /bedrock/basic/README.md: -------------------------------------------------------------------------------- 1 | # Basic Amazon Bedrock workflow 2 | 3 | A basic Bedrock workflow. Starts a workflow with a prompt, generates a response and ends the workflow. 4 | 5 | To run, first see `samples-python` [README.md](../../README.md), and `bedrock` [README.md](../README.md) for prerequisites specific to this sample. Once set up, run the following from this directory: 6 | 7 | 1. Run the worker: `uv run run_worker.py` 8 | 2. In another terminal run the client with a prompt: 9 | 10 | e.g. 
`uv run send_message.py 'What animals are marsupials?'` -------------------------------------------------------------------------------- /bedrock/basic/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/bedrock/basic/__init__.py -------------------------------------------------------------------------------- /bedrock/basic/run_worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import concurrent.futures 3 | import logging 4 | 5 | from temporalio.client import Client 6 | from temporalio.worker import Worker 7 | from workflows import BasicBedrockWorkflow 8 | 9 | from bedrock.shared.activities import BedrockActivities 10 | 11 | 12 | async def main(): 13 | # Create client connected to server at the given address 14 | client = await Client.connect("localhost:7233") 15 | activities = BedrockActivities() 16 | 17 | # Run the worker 18 | with concurrent.futures.ThreadPoolExecutor(max_workers=100) as activity_executor: 19 | worker = Worker( 20 | client, 21 | task_queue="bedrock-task-queue", 22 | workflows=[BasicBedrockWorkflow], 23 | activities=[activities.prompt_bedrock], 24 | activity_executor=activity_executor, 25 | ) 26 | await worker.run() 27 | 28 | 29 | if __name__ == "__main__": 30 | print("Starting worker") 31 | print("Then run 'python send_message.py \"\"'") 32 | 33 | logging.basicConfig(level=logging.INFO) 34 | 35 | asyncio.run(main()) 36 | -------------------------------------------------------------------------------- /bedrock/basic/send_message.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import sys 3 | 4 | from temporalio.client import Client 5 | from workflows import BasicBedrockWorkflow 6 | 7 | 8 | async def main(prompt: str) -> str: 9 | # Create client connected to server at the given address 
10 | client = await Client.connect("localhost:7233") 11 | 12 | # Start the workflow 13 | workflow_id = "basic-bedrock-workflow" 14 | handle = await client.start_workflow( 15 | BasicBedrockWorkflow.run, 16 | prompt, # Initial prompt 17 | id=workflow_id, 18 | task_queue="bedrock-task-queue", 19 | ) 20 | return await handle.result() 21 | 22 | 23 | if __name__ == "__main__": 24 | if len(sys.argv) != 2: 25 | print("Usage: python send_message.py ''") 26 | print("Example: python send_message.py 'What animals are marsupials?'") 27 | else: 28 | result = asyncio.run(main(sys.argv[1])) 29 | print(result) 30 | -------------------------------------------------------------------------------- /bedrock/basic/workflows.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | 3 | from temporalio import workflow 4 | 5 | with workflow.unsafe.imports_passed_through(): 6 | from bedrock.shared.activities import BedrockActivities 7 | 8 | 9 | @workflow.defn 10 | class BasicBedrockWorkflow: 11 | @workflow.run 12 | async def run(self, prompt: str) -> str: 13 | 14 | workflow.logger.info("Prompt: %s" % prompt) 15 | 16 | response = await workflow.execute_activity_method( 17 | BedrockActivities.prompt_bedrock, 18 | prompt, 19 | schedule_to_close_timeout=timedelta(seconds=20), 20 | ) 21 | 22 | workflow.logger.info("Response: %s" % response) 23 | 24 | return response 25 | -------------------------------------------------------------------------------- /bedrock/entity/README.md: -------------------------------------------------------------------------------- 1 | # Multi-turn chat with Amazon Bedrock Entity Workflow 2 | 3 | Multi-Turn Chat using an Entity Workflow. The workflow runs forever unless explicitly ended. The workflow continues as new after a configurable number of chat turns to keep the prompt size small and the Temporal event history small. 
Each continued-as-new workflow receives a summary of the conversation history so far for context. 4 | 5 | To run, first see `samples-python` [README.md](../../README.md), and `bedrock` [README.md](../README.md) for prerequisites specific to this sample. Once set up, run the following from this directory: 6 | 7 | 1. Run the worker: `uv run run_worker.py` 8 | 2. In another terminal run the client with a prompt. 9 | 10 | Example: `uv run send_message.py 'What animals are marsupials?'` 11 | 12 | 3. View the worker's output for the response. 13 | 4. Give followup prompts by signaling the workflow. 14 | 15 | Example: `uv run send_message.py 'Do they lay eggs?'` 16 | 5. Get the conversation history summary by querying the workflow. 17 | 18 | Example: `uv run get_history.py` 19 | 6. To end the chat session, run `uv run end_chat.py` 20 | -------------------------------------------------------------------------------- /bedrock/entity/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/bedrock/entity/__init__.py -------------------------------------------------------------------------------- /bedrock/entity/end_chat.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import sys 3 | 4 | from temporalio.client import Client 5 | from workflows import EntityBedrockWorkflow 6 | 7 | 8 | async def main(): 9 | # Create client connected to server at the given address 10 | client = await Client.connect("localhost:7233") 11 | 12 | workflow_id = "entity-bedrock-workflow" 13 | 14 | handle = client.get_workflow_handle_for(EntityBedrockWorkflow.run, workflow_id) 15 | 16 | # Sends a signal to the workflow 17 | await handle.signal(EntityBedrockWorkflow.end_chat) 18 | 19 | 20 | if __name__ == "__main__": 21 | print("Sending signal to end chat.") 22 | asyncio.run(main()) 23 | 
-------------------------------------------------------------------------------- /bedrock/entity/get_history.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | from workflows import EntityBedrockWorkflow 5 | 6 | 7 | async def main(): 8 | # Create client connected to server at the given address 9 | client = await Client.connect("localhost:7233") 10 | workflow_id = "entity-bedrock-workflow" 11 | 12 | handle = client.get_workflow_handle(workflow_id) 13 | 14 | # Queries the workflow for the conversation history 15 | history = await handle.query(EntityBedrockWorkflow.get_conversation_history) 16 | 17 | print("Conversation History") 18 | print( 19 | *(f"{speaker.title()}: {message}\n" for speaker, message in history), sep="\n" 20 | ) 21 | 22 | # Queries the workflow for the conversation summary 23 | summary = await handle.query(EntityBedrockWorkflow.get_summary_from_history) 24 | 25 | if summary is not None: 26 | print("Conversation Summary:") 27 | print(summary) 28 | 29 | 30 | if __name__ == "__main__": 31 | asyncio.run(main()) 32 | -------------------------------------------------------------------------------- /bedrock/entity/run_worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import concurrent.futures 3 | import logging 4 | 5 | from temporalio.client import Client 6 | from temporalio.worker import Worker 7 | from workflows import EntityBedrockWorkflow 8 | 9 | from bedrock.shared.activities import BedrockActivities 10 | 11 | 12 | async def main(): 13 | # Create client connected to server at the given address 14 | client = await Client.connect("localhost:7233") 15 | activities = BedrockActivities() 16 | 17 | # Run the worker 18 | with concurrent.futures.ThreadPoolExecutor(max_workers=100) as activity_executor: 19 | worker = Worker( 20 | client, 21 | task_queue="bedrock-task-queue", 22 | 
workflows=[EntityBedrockWorkflow], 23 | activities=[activities.prompt_bedrock], 24 | activity_executor=activity_executor, 25 | ) 26 | await worker.run() 27 | 28 | 29 | if __name__ == "__main__": 30 | print("Starting worker") 31 | print("Then run 'python send_message.py \"\"'") 32 | 33 | logging.basicConfig(level=logging.INFO) 34 | 35 | asyncio.run(main()) 36 | -------------------------------------------------------------------------------- /bedrock/entity/send_message.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import sys 3 | 4 | from temporalio.client import Client 5 | from workflows import BedrockParams, EntityBedrockWorkflow 6 | 7 | 8 | async def main(prompt): 9 | # Create client connected to server at the given address 10 | client = await Client.connect("localhost:7233") 11 | 12 | workflow_id = "entity-bedrock-workflow" 13 | 14 | # Sends a signal to the workflow (and starts it if needed) 15 | await client.start_workflow( 16 | EntityBedrockWorkflow.run, 17 | BedrockParams(None, None), 18 | id=workflow_id, 19 | task_queue="bedrock-task-queue", 20 | start_signal="user_prompt", 21 | start_signal_args=[prompt], 22 | ) 23 | 24 | 25 | if __name__ == "__main__": 26 | if len(sys.argv) != 2: 27 | print("Usage: python send_message.py ''") 28 | print("Example: python send_message.py 'What animals are marsupials?'") 29 | else: 30 | asyncio.run(main(sys.argv[1])) 31 | -------------------------------------------------------------------------------- /bedrock/shared/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/bedrock/shared/__init__.py -------------------------------------------------------------------------------- /bedrock/shared/activities.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import boto3 4 | 
from botocore.config import Config 5 | from temporalio import activity 6 | 7 | config = Config(region_name="us-west-2") 8 | 9 | 10 | class BedrockActivities: 11 | def __init__(self) -> None: 12 | self.bedrock = boto3.client(service_name="bedrock-runtime", config=config) 13 | 14 | @activity.defn 15 | def prompt_bedrock(self, prompt: str) -> str: 16 | # Model params 17 | modelId = "meta.llama2-70b-chat-v1" 18 | accept = "application/json" 19 | contentType = "application/json" 20 | max_gen_len = 512 21 | temperature = 0.1 22 | top_p = 0.2 23 | 24 | body = json.dumps( 25 | { 26 | "prompt": prompt, 27 | "max_gen_len": max_gen_len, 28 | "temperature": temperature, 29 | "top_p": top_p, 30 | } 31 | ) 32 | 33 | response = self.bedrock.invoke_model( 34 | body=body, modelId=modelId, accept=accept, contentType=contentType 35 | ) 36 | 37 | response_body = json.loads(response.get("body").read()) 38 | 39 | return response_body.get("generation") 40 | -------------------------------------------------------------------------------- /bedrock/signals_and_queries/README.md: -------------------------------------------------------------------------------- 1 | # Amazon Bedrock workflow using Signals and Queries 2 | 3 | Adding signals & queries to the [basic Bedrock sample](../basic). Starts a workflow with a prompt, allows follow-up prompts to be given using Temporal signals, and allows the conversation history to be queried using Temporal queries. 4 | 5 | To run, first see `samples-python` [README.md](../../README.md), and `bedrock` [README.md](../README.md) for prerequisites specific to this sample. Once set up, run the following from this directory: 6 | 7 | 1. Run the worker: `uv run run_worker.py` 8 | 2. In another terminal run the client with a prompt. 9 | 10 | Example: `uv run send_message.py 'What animals are marsupials?'` 11 | 12 | 3. View the worker's output for the response. 13 | 4. Give followup prompts by signaling the workflow.
14 | 15 | Example: `uv run send_message.py 'Do they lay eggs?'` 16 | 5. Get the conversation history by querying the workflow. 17 | 18 | Example: `uv run get_history.py` 19 | 6. The workflow will timeout after inactivity. 20 | -------------------------------------------------------------------------------- /bedrock/signals_and_queries/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/bedrock/signals_and_queries/__init__.py -------------------------------------------------------------------------------- /bedrock/signals_and_queries/get_history.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | from workflows import SignalQueryBedrockWorkflow 5 | 6 | 7 | async def main(): 8 | # Create client connected to server at the given address 9 | client = await Client.connect("localhost:7233") 10 | workflow_id = "bedrock-workflow-with-signals" 11 | 12 | handle = client.get_workflow_handle(workflow_id) 13 | 14 | # Queries the workflow for the conversation history 15 | history = await handle.query(SignalQueryBedrockWorkflow.get_conversation_history) 16 | 17 | print("Conversation History") 18 | print( 19 | *(f"{speaker.title()}: {message}\n" for speaker, message in history), sep="\n" 20 | ) 21 | 22 | # Queries the workflow for the conversation summary 23 | summary = await handle.query(SignalQueryBedrockWorkflow.get_summary_from_history) 24 | 25 | if summary is not None: 26 | print("Conversation Summary:") 27 | print(summary) 28 | 29 | 30 | if __name__ == "__main__": 31 | asyncio.run(main()) 32 | -------------------------------------------------------------------------------- /bedrock/signals_and_queries/run_worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import 
concurrent.futures 3 | import logging 4 | 5 | from temporalio.client import Client 6 | from temporalio.worker import Worker 7 | from workflows import SignalQueryBedrockWorkflow 8 | 9 | from bedrock.shared.activities import BedrockActivities 10 | 11 | 12 | async def main(): 13 | # Create client connected to server at the given address 14 | client = await Client.connect("localhost:7233") 15 | activities = BedrockActivities() 16 | 17 | # Run the worker 18 | with concurrent.futures.ThreadPoolExecutor(max_workers=100) as activity_executor: 19 | worker = Worker( 20 | client, 21 | task_queue="bedrock-task-queue", 22 | workflows=[SignalQueryBedrockWorkflow], 23 | activities=[activities.prompt_bedrock], 24 | activity_executor=activity_executor, 25 | ) 26 | await worker.run() 27 | 28 | 29 | if __name__ == "__main__": 30 | print("Starting worker") 31 | print("Then run 'python send_message.py \"\"'") 32 | 33 | logging.basicConfig(level=logging.INFO) 34 | 35 | asyncio.run(main()) 36 | -------------------------------------------------------------------------------- /bedrock/signals_and_queries/send_message.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import sys 3 | 4 | from temporalio.client import Client 5 | from workflows import SignalQueryBedrockWorkflow 6 | 7 | 8 | async def main(prompt): 9 | # Create client connected to server at the given address 10 | client = await Client.connect("localhost:7233") 11 | 12 | workflow_id = "bedrock-workflow-with-signals" 13 | inactivity_timeout_minutes = 1 14 | 15 | # Sends a signal to the workflow (and starts it if needed) 16 | await client.start_workflow( 17 | SignalQueryBedrockWorkflow.run, 18 | inactivity_timeout_minutes, 19 | id=workflow_id, 20 | task_queue="bedrock-task-queue", 21 | start_signal="user_prompt", 22 | start_signal_args=[prompt], 23 | ) 24 | 25 | 26 | if __name__ == "__main__": 27 | if len(sys.argv) != 2: 28 | print("Usage: python send_message.py ''") 29 | 
print("Example: python send_message.py 'What animals are marsupials?'") 30 | else: 31 | asyncio.run(main(sys.argv[1])) 32 | -------------------------------------------------------------------------------- /cloud_export_to_parquet/README.md: -------------------------------------------------------------------------------- 1 | # Cloud Export to parquet sample 2 | 3 | This is an example workflow to convert exported file from proto to parquet file. The workflow is an hourly schedule. 4 | 5 | Please make sure your Python version is 3.9 or above. For this sample, run: 6 | 7 | uv sync --group=cloud-export-to-parquet 8 | 9 | Before you start, please modify workflow input in `create_schedule.py` with your s3 bucket and namespace. Also make sure you have the right AWS permissions set up in your environment to allow this workflow to read and write to your s3 bucket. 10 | 11 | To run, first see [README.md](../README.md) for prerequisites. Then, run the following from this directory to start the worker: 12 | 13 | ```bash 14 | uv run run_worker.py 15 | ``` 16 | 17 | This will start the worker. Then, in another terminal, run the following to execute the schedule: 18 | 19 | ```bash 20 | uv run create_schedule.py 21 | ``` 22 | 23 | The workflow should convert exported file in your input s3 bucket to parquet in your specified location.
24 | -------------------------------------------------------------------------------- /cloud_export_to_parquet/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/cloud_export_to_parquet/__init__.py -------------------------------------------------------------------------------- /cloud_export_to_parquet/run_worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from concurrent.futures import ThreadPoolExecutor 3 | 4 | from temporalio.client import Client 5 | from temporalio.worker import Worker 6 | from temporalio.worker.workflow_sandbox import ( 7 | SandboxedWorkflowRunner, 8 | SandboxRestrictions, 9 | ) 10 | 11 | from cloud_export_to_parquet.data_trans_activities import ( 12 | data_trans_and_land, 13 | get_object_keys, 14 | ) 15 | from cloud_export_to_parquet.workflows import ProtoToParquet 16 | 17 | 18 | async def main() -> None: 19 | """Main worker function.""" 20 | # Create client connected to server at the given address 21 | client = await Client.connect("localhost:7233") 22 | 23 | # Run the worker 24 | worker: Worker = Worker( 25 | client, 26 | task_queue="DATA_TRANSFORMATION_TASK_QUEUE", 27 | workflows=[ProtoToParquet], 28 | activities=[get_object_keys, data_trans_and_land], 29 | workflow_runner=SandboxedWorkflowRunner( 30 | restrictions=SandboxRestrictions.default.with_passthrough_modules("boto3") 31 | ), 32 | activity_executor=ThreadPoolExecutor(100), 33 | ) 34 | await worker.run() 35 | 36 | 37 | if __name__ == "__main__": 38 | asyncio.run(main()) 39 | -------------------------------------------------------------------------------- /context_propagation/README.md: -------------------------------------------------------------------------------- 1 | # Context Propagation Interceptor Sample 2 | 3 | This sample shows how to use an interceptor to propagate 
contextual information through workflows and activities. For 4 | this example, [contextvars](https://docs.python.org/3/library/contextvars.html) holds the contextual information. 5 | 6 | To run, first see [README.md](../README.md) for prerequisites. Then, run the following from this directory to start the 7 | worker: 8 | 9 | uv run worker.py 10 | 11 | This will start the worker. Then, in another terminal, run the following to execute the workflow: 12 | 13 | uv run starter.py 14 | 15 | The starter terminal should complete with the hello result and the worker terminal should show the logs with the 16 | propagated user ID contextual information flowing through the workflows/activities. -------------------------------------------------------------------------------- /context_propagation/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/context_propagation/__init__.py -------------------------------------------------------------------------------- /context_propagation/activities.py: -------------------------------------------------------------------------------- 1 | from temporalio import activity 2 | 3 | from context_propagation import shared 4 | 5 | 6 | @activity.defn 7 | async def say_hello_activity(name: str) -> str: 8 | activity.logger.info(f"Activity called by user {shared.user_id.get()}") 9 | return f"Hello, {name}" 10 | -------------------------------------------------------------------------------- /context_propagation/shared.py: -------------------------------------------------------------------------------- 1 | from contextvars import ContextVar 2 | from typing import Optional 3 | 4 | HEADER_KEY = "__my_user_id" 5 | 6 | user_id: ContextVar[Optional[str]] = ContextVar("user_id", default=None) 7 | -------------------------------------------------------------------------------- /context_propagation/starter.py: 
-------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | from temporalio.client import Client 5 | 6 | from context_propagation import interceptor, shared, workflows 7 | 8 | 9 | async def main(): 10 | logging.basicConfig(level=logging.INFO) 11 | 12 | # Set the user ID 13 | shared.user_id.set("some-user") 14 | 15 | # Connect client 16 | client = await Client.connect( 17 | "localhost:7233", 18 | # Use our interceptor 19 | interceptors=[interceptor.ContextPropagationInterceptor()], 20 | ) 21 | 22 | # Start workflow, send signal, wait for completion, issue query 23 | handle = await client.start_workflow( 24 | workflows.SayHelloWorkflow.run, 25 | "Temporal", 26 | id=f"context-propagation-workflow-id", 27 | task_queue="context-propagation-task-queue", 28 | ) 29 | await handle.signal(workflows.SayHelloWorkflow.signal_complete) 30 | result = await handle.result() 31 | logging.info(f"Workflow result: {result}") 32 | 33 | 34 | if __name__ == "__main__": 35 | asyncio.run(main()) 36 | -------------------------------------------------------------------------------- /context_propagation/worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | from temporalio.client import Client 5 | from temporalio.worker import Worker 6 | 7 | from context_propagation import activities, interceptor, workflows 8 | 9 | interrupt_event = asyncio.Event() 10 | 11 | 12 | async def main(): 13 | logging.basicConfig(level=logging.INFO) 14 | 15 | # Connect client 16 | client = await Client.connect( 17 | "localhost:7233", 18 | # Use our interceptor 19 | interceptors=[interceptor.ContextPropagationInterceptor()], 20 | ) 21 | 22 | # Run a worker for the workflow 23 | async with Worker( 24 | client, 25 | task_queue="context-propagation-task-queue", 26 | activities=[activities.say_hello_activity], 27 | workflows=[workflows.SayHelloWorkflow], 28 | ): 29 | # Wait 
until interrupted 30 | logging.info("Worker started, ctrl+c to exit") 31 | await interrupt_event.wait() 32 | logging.info("Shutting down") 33 | 34 | 35 | if __name__ == "__main__": 36 | loop = asyncio.new_event_loop() 37 | try: 38 | loop.run_until_complete(main()) 39 | except KeyboardInterrupt: 40 | interrupt_event.set() 41 | loop.run_until_complete(loop.shutdown_asyncgens()) 42 | -------------------------------------------------------------------------------- /context_propagation/workflows.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | 3 | from temporalio import workflow 4 | 5 | with workflow.unsafe.imports_passed_through(): 6 | from context_propagation.activities import say_hello_activity 7 | from context_propagation.shared import user_id 8 | 9 | 10 | @workflow.defn 11 | class SayHelloWorkflow: 12 | def __init__(self) -> None: 13 | self._complete = False 14 | 15 | @workflow.run 16 | async def run(self, name: str) -> str: 17 | workflow.logger.info(f"Workflow called by user {user_id.get()}") 18 | 19 | # Wait for signal then run activity 20 | await workflow.wait_condition(lambda: self._complete) 21 | return await workflow.execute_activity( 22 | say_hello_activity, name, start_to_close_timeout=timedelta(minutes=5) 23 | ) 24 | 25 | @workflow.signal 26 | async def signal_complete(self) -> None: 27 | workflow.logger.info(f"Signal called by user {user_id.get()}") 28 | self._complete = True 29 | -------------------------------------------------------------------------------- /custom_converter/README.md: -------------------------------------------------------------------------------- 1 | # Custom Converter Sample 2 | 3 | This sample shows how to make a custom payload converter for a type not natively supported by Temporal. 4 | 5 | To run, first see [README.md](../README.md) for prerequisites. 
Then, run the following from this directory to start the 6 | worker: 7 | 8 | uv run worker.py 9 | 10 | This will start the worker. Then, in another terminal, run the following to execute the workflow: 11 | 12 | uv run starter.py 13 | 14 | The workflow should complete with the hello result. If the custom converter was not set for the custom input and output 15 | classes, we would get an error on the client side and on the worker side. -------------------------------------------------------------------------------- /custom_converter/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/custom_converter/__init__.py -------------------------------------------------------------------------------- /custom_converter/starter.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | 5 | from custom_converter.shared import ( 6 | GreetingInput, 7 | GreetingOutput, 8 | greeting_data_converter, 9 | ) 10 | from custom_converter.workflow import GreetingWorkflow 11 | 12 | 13 | async def main(): 14 | # Connect client 15 | client = await Client.connect( 16 | "localhost:7233", 17 | # Without this we get: 18 | # TypeError: Object of type GreetingInput is not JSON serializable 19 | data_converter=greeting_data_converter, 20 | ) 21 | 22 | # Run workflow 23 | result = await client.execute_workflow( 24 | GreetingWorkflow.run, 25 | GreetingInput("Temporal"), 26 | id=f"custom_converter-workflow-id", 27 | task_queue="custom_converter-task-queue", 28 | ) 29 | assert isinstance(result, GreetingOutput) 30 | print(f"Workflow result: {result.result}") 31 | 32 | 33 | if __name__ == "__main__": 34 | asyncio.run(main()) 35 | -------------------------------------------------------------------------------- /custom_converter/worker.py: 
-------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | from temporalio.worker import Worker 5 | 6 | from custom_converter.shared import greeting_data_converter 7 | from custom_converter.workflow import GreetingWorkflow 8 | 9 | interrupt_event = asyncio.Event() 10 | 11 | 12 | async def main(): 13 | # Connect client 14 | client = await Client.connect( 15 | "localhost:7233", 16 | # Without this, when trying to run a workflow, we get: 17 | # KeyError: 'Unknown payload encoding my-greeting-encoding 18 | data_converter=greeting_data_converter, 19 | ) 20 | 21 | # Run a worker for the workflow 22 | async with Worker( 23 | client, 24 | task_queue="custom_converter-task-queue", 25 | workflows=[GreetingWorkflow], 26 | ): 27 | # Wait until interrupted 28 | print("Worker started, ctrl+c to exit") 29 | await interrupt_event.wait() 30 | print("Shutting down") 31 | 32 | 33 | if __name__ == "__main__": 34 | loop = asyncio.new_event_loop() 35 | try: 36 | loop.run_until_complete(main()) 37 | except KeyboardInterrupt: 38 | interrupt_event.set() 39 | loop.run_until_complete(loop.shutdown_asyncgens()) 40 | -------------------------------------------------------------------------------- /custom_converter/workflow.py: -------------------------------------------------------------------------------- 1 | from temporalio import workflow 2 | 3 | with workflow.unsafe.imports_passed_through(): 4 | from custom_converter.shared import GreetingInput, GreetingOutput 5 | 6 | 7 | @workflow.defn 8 | class GreetingWorkflow: 9 | @workflow.run 10 | async def run(self, input: GreetingInput) -> GreetingOutput: 11 | return GreetingOutput(f"Hello, {input.name}") 12 | -------------------------------------------------------------------------------- /custom_decorator/README.md: -------------------------------------------------------------------------------- 1 | # Custom Decorator Sample 2 | 3 | This sample shows how a
custom decorator can help with Temporal code reuse. Specifically, this makes a `@auto_heartbeater` 4 | decorator that automatically configures an activity to heartbeat twice as frequently as the heartbeat timeout is set to. 5 | 6 | To run, first see [README.md](../README.md) for prerequisites. Then, run the following from this directory to start the 7 | worker: 8 | 9 | uv run worker.py 10 | 11 | This will start the worker. Then, in another terminal, run the following to execute the workflow: 12 | 13 | uv run starter.py 14 | 15 | The workflow will be started, and then after 5 seconds will be sent a signal to cancel its forever-running activity. 16 | The activity has a heartbeat timeout set to 2s, so since it has the `@auto_heartbeater` decorator set, it will heartbeat 17 | every second. If this was not set, the workflow would fail with an activity heartbeat timeout failure. -------------------------------------------------------------------------------- /custom_decorator/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/custom_decorator/__init__.py -------------------------------------------------------------------------------- /custom_decorator/activity_utils.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from datetime import datetime 3 | from functools import wraps 4 | from typing import Any, Awaitable, Callable, TypeVar, cast 5 | 6 | from temporalio import activity 7 | 8 | F = TypeVar("F", bound=Callable[..., Awaitable[Any]]) 9 | 10 | 11 | def auto_heartbeater(fn: F) -> F: 12 | # We want to ensure that the type hints from the original callable are 13 | # available via our wrapper, so we use the functools wraps decorator 14 | @wraps(fn) 15 | async def wrapper(*args, **kwargs): 16 | heartbeat_timeout = activity.info().heartbeat_timeout 17 | heartbeat_task = None 
18 | if heartbeat_timeout: 19 | # Heartbeat twice as often as the timeout 20 | heartbeat_task = asyncio.create_task( 21 | heartbeat_every(heartbeat_timeout.total_seconds() / 2) 22 | ) 23 | try: 24 | return await fn(*args, **kwargs) 25 | finally: 26 | if heartbeat_task: 27 | heartbeat_task.cancel() 28 | # Wait for heartbeat cancellation to complete 29 | await asyncio.wait([heartbeat_task]) 30 | 31 | return cast(F, wrapper) 32 | 33 | 34 | async def heartbeat_every(delay: float, *details: Any) -> None: 35 | # Heartbeat every so often while not cancelled 36 | while True: 37 | await asyncio.sleep(delay) 38 | print(f"Heartbeating at {datetime.now()}") 39 | activity.heartbeat(*details) 40 | -------------------------------------------------------------------------------- /custom_decorator/starter.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | 5 | from custom_decorator.worker import WaitForCancelWorkflow 6 | 7 | 8 | async def main(): 9 | # Connect client 10 | client = await Client.connect("localhost:7233") 11 | 12 | # Start the workflow 13 | handle = await client.start_workflow( 14 | WaitForCancelWorkflow.run, 15 | id=f"custom_decorator-workflow-id", 16 | task_queue="custom_decorator-task-queue", 17 | ) 18 | print("Started workflow, waiting 5 seconds before cancelling") 19 | await asyncio.sleep(5) 20 | 21 | # Send a signal asking workflow to cancel the activity 22 | await handle.signal(WaitForCancelWorkflow.cancel_activity) 23 | 24 | # Wait and expect to be told about the activity being cancelled. If we did 25 | # not have the automatic heartbeater decorator, the signal would have failed 26 | # because the workflow would already be completed as failed with activity 27 | # heartbeat timeout. 
28 | result = await handle.result() 29 | print(f"Result: {result}") 30 | 31 | 32 | if __name__ == "__main__": 33 | asyncio.run(main()) 34 | -------------------------------------------------------------------------------- /custom_metric/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/custom_metric/__init__.py -------------------------------------------------------------------------------- /custom_metric/activity.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from temporalio import activity 4 | 5 | 6 | @activity.defn 7 | def print_and_sleep(): 8 | print("In the activity.") 9 | time.sleep(1) 10 | -------------------------------------------------------------------------------- /custom_metric/starter.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import uuid 3 | 4 | from temporalio.client import Client 5 | 6 | from custom_metric.workflow import StartTwoActivitiesWorkflow 7 | 8 | 9 | async def main(): 10 | 11 | client = await Client.connect( 12 | "localhost:7233", 13 | ) 14 | 15 | await client.start_workflow( 16 | StartTwoActivitiesWorkflow.run, 17 | id="execute-activity-workflow-" + str(uuid.uuid4()), 18 | task_queue="custom-metric-task-queue", 19 | ) 20 | 21 | 22 | if __name__ == "__main__": 23 | asyncio.run(main()) 24 | -------------------------------------------------------------------------------- /custom_metric/workflow.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from datetime import timedelta 3 | 4 | from temporalio import workflow 5 | 6 | with workflow.unsafe.imports_passed_through(): 7 | from custom_metric.activity import print_and_sleep 8 | 9 | 10 | @workflow.defn 11 | class StartTwoActivitiesWorkflow: 12 | @workflow.run 13 | async 
def run(self): 14 | # Request two concurrent activities with only one task slot so 15 | # we can see nontrivial schedule to start times. 16 | activity1 = workflow.execute_activity( 17 | print_and_sleep, 18 | start_to_close_timeout=timedelta(seconds=5), 19 | ) 20 | activity2 = workflow.execute_activity( 21 | print_and_sleep, 22 | start_to_close_timeout=timedelta(seconds=5), 23 | ) 24 | await asyncio.gather(activity1, activity2) 25 | return None 26 | -------------------------------------------------------------------------------- /dsl/README.md: -------------------------------------------------------------------------------- 1 | # DSL Sample 2 | 3 | This sample shows how to have a workflow interpret/invoke arbitrary steps defined in a DSL. It is similar to the DSL 4 | samples [in TypeScript](https://github.com/temporalio/samples-typescript/tree/main/dsl-interpreter) and 5 | [in Go](https://github.com/temporalio/samples-go/tree/main/dsl). 6 | 7 | For this sample, the optional `dsl` dependency group must be included. To include, run: 8 | 9 | uv sync --group dsl 10 | 11 | To run, first see [README.md](../README.md) for prerequisites. Then, run the following from this directory to start the 12 | worker: 13 | 14 | uv run worker.py 15 | 16 | This will start the worker. Then, in another terminal, run the following to execute a workflow of steps defined in 17 | [workflow1.yaml](workflow1.yaml): 18 | 19 | uv run starter.py workflow1.yaml 20 | 21 | This will run the workflow and show the final variables that the workflow returns. Looking in the worker terminal, each 22 | step executed will be visible. 23 | 24 | Similarly we can do the same for the more advanced [workflow2.yaml](workflow2.yaml) file: 25 | 26 | uv run starter.py workflow2.yaml 27 | 28 | This sample gives a guide of how one can write a workflow to interpret arbitrary steps from a user-provided DSL. Many 29 | DSL models are more advanced and are more specific to conform to business logic needs. 
-------------------------------------------------------------------------------- /dsl/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/dsl/__init__.py -------------------------------------------------------------------------------- /dsl/activities.py: -------------------------------------------------------------------------------- 1 | from temporalio import activity 2 | 3 | 4 | class DSLActivities: 5 | @activity.defn 6 | async def activity1(self, arg: str) -> str: 7 | activity.logger.info(f"Executing activity1 with arg: {arg}") 8 | return f"[result from activity1: {arg}]" 9 | 10 | @activity.defn 11 | async def activity2(self, arg: str) -> str: 12 | activity.logger.info(f"Executing activity2 with arg: {arg}") 13 | return f"[result from activity2: {arg}]" 14 | 15 | @activity.defn 16 | async def activity3(self, arg1: str, arg2: str) -> str: 17 | activity.logger.info(f"Executing activity3 with args: {arg1} and {arg2}") 18 | return f"[result from activity3: {arg1} {arg2}]" 19 | 20 | @activity.defn 21 | async def activity4(self, arg: str) -> str: 22 | activity.logger.info(f"Executing activity4 with arg: {arg}") 23 | return f"[result from activity4: {arg}]" 24 | 25 | @activity.defn 26 | async def activity5(self, arg1: str, arg2: str) -> str: 27 | activity.logger.info(f"Executing activity5 with args: {arg1} and {arg2}") 28 | return f"[result from activity5: {arg1} {arg2}]" 29 | -------------------------------------------------------------------------------- /dsl/starter.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | import sys 4 | import uuid 5 | 6 | import dacite 7 | import yaml 8 | from temporalio.client import Client 9 | 10 | from dsl.workflow import DSLInput, DSLWorkflow 11 | 12 | 13 | async def main(dsl_yaml: str) -> None: 14 | # Convert the YAML 
to our dataclass structure. We use PyYAML + dacite to do 15 | # this but it can be done any number of ways. 16 | dsl_input = dacite.from_dict(DSLInput, yaml.safe_load(dsl_yaml)) 17 | 18 | # Connect client 19 | client = await Client.connect("localhost:7233") 20 | 21 | # Run workflow 22 | result = await client.execute_workflow( 23 | DSLWorkflow.run, 24 | dsl_input, 25 | id=f"dsl-workflow-id-{uuid.uuid4()}", 26 | task_queue="dsl-task-queue", 27 | ) 28 | logging.info( 29 | f"Final variables:\n " 30 | + "\n ".join((f"{k}: {v}" for k, v in result.items())) 31 | ) 32 | 33 | 34 | if __name__ == "__main__": 35 | logging.basicConfig(level=logging.INFO) 36 | 37 | # Require the YAML file as an argument. We read this _outside_ of the async 38 | # def function because thread-blocking IO should never happen in async def 39 | # functions. 40 | if len(sys.argv) != 2: 41 | raise RuntimeError("Expected single argument for YAML file") 42 | with open(sys.argv[1], "r") as yaml_file: 43 | dsl_yaml = yaml_file.read() 44 | 45 | # Run 46 | asyncio.run(main(dsl_yaml)) 47 | -------------------------------------------------------------------------------- /dsl/worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | from temporalio.client import Client 5 | from temporalio.worker import Worker 6 | 7 | from dsl.activities import DSLActivities 8 | from dsl.workflow import DSLWorkflow 9 | 10 | interrupt_event = asyncio.Event() 11 | 12 | 13 | async def main(): 14 | # Connect client 15 | client = await Client.connect("localhost:7233") 16 | 17 | # Run a worker for the activities and workflow 18 | activities = DSLActivities() 19 | async with Worker( 20 | client, 21 | task_queue="dsl-task-queue", 22 | activities=[ 23 | activities.activity1, 24 | activities.activity2, 25 | activities.activity3, 26 | activities.activity4, 27 | activities.activity5, 28 | ], 29 | workflows=[DSLWorkflow], 30 | ): 31 | # Wait until interrupted 32 
| logging.info("Worker started, ctrl+c to exit") 33 | await interrupt_event.wait() 34 | logging.info("Shutting down") 35 | 36 | 37 | if __name__ == "__main__": 38 | logging.basicConfig(level=logging.INFO) 39 | loop = asyncio.new_event_loop() 40 | try: 41 | loop.run_until_complete(main()) 42 | except KeyboardInterrupt: 43 | interrupt_event.set() 44 | loop.run_until_complete(loop.shutdown_asyncgens()) 45 | -------------------------------------------------------------------------------- /dsl/workflow1.yaml: -------------------------------------------------------------------------------- 1 | # This sample workflows execute 3 steps in sequence. 2 | # 1) Activity1, takes arg1 as input, and put result as result1. 3 | # 2) Activity2, takes result1 as input, and put result as result2. 4 | # 3) Activity3, takes args2 and result2 as input, and put result as result3. 5 | 6 | variables: 7 | arg1: value1 8 | arg2: value2 9 | 10 | root: 11 | sequence: 12 | elements: 13 | - activity: 14 | name: activity1 15 | arguments: 16 | - arg1 17 | result: result1 18 | - activity: 19 | name: activity2 20 | arguments: 21 | - result1 22 | result: result2 23 | - activity: 24 | name: activity3 25 | arguments: 26 | - arg2 27 | - result2 28 | result: result3 -------------------------------------------------------------------------------- /dsl/workflow2.yaml: -------------------------------------------------------------------------------- 1 | # This sample workflow executes 3 steps in sequence. 2 | # 1) activity1, takes arg1 as input, and put result as result1. 
3 | # 2) it runs a parallel block which runs below sequence branches in parallel 4 | # 2.1) sequence 1 5 | # 2.1.1) activity2, takes result1 as input, and put result as result2 6 | # 2.1.2) activity3, takes arg2 and result2 as input, and put result as result3 7 | # 2.2) sequence 2 8 | # 2.2.1) activity4, takes result1 as input, and put result as result4 9 | # 2.2.2) activity5, takes arg3 and result4 as input, and put result as result5 10 | # 3) activity3, takes result3 and result5 as input, and put result as result6. 11 | 12 | variables: 13 | arg1: value1 14 | arg2: value2 15 | arg3: value3 16 | 17 | root: 18 | sequence: 19 | elements: 20 | - activity: 21 | name: activity1 22 | arguments: 23 | - arg1 24 | result: result1 25 | - parallel: 26 | branches: 27 | - sequence: 28 | elements: 29 | - activity: 30 | name: activity2 31 | arguments: 32 | - result1 33 | result: result2 34 | - activity: 35 | name: activity3 36 | arguments: 37 | - arg2 38 | - result2 39 | result: result3 40 | - sequence: 41 | elements: 42 | - activity: 43 | name: activity4 44 | arguments: 45 | - result1 46 | result: result4 47 | - activity: 48 | name: activity5 49 | arguments: 50 | - arg3 51 | - result4 52 | result: result5 53 | - activity: 54 | name: activity3 55 | arguments: 56 | - result3 57 | - result5 58 | result: result6 -------------------------------------------------------------------------------- /encryption/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/encryption/__init__.py -------------------------------------------------------------------------------- /encryption/codec_server.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | from typing import Awaitable, Callable, Iterable, List 3 | 4 | from aiohttp import hdrs, web 5 | from google.protobuf import json_format 6 | 
def build_codec_server() -> web.Application:
    """Build an aiohttp application exposing /encode and /decode endpoints.

    The Temporal Web UI (localhost:8233) calls these endpoints to encode and
    decode payloads through the EncryptionCodec, so CORS headers are applied
    for that origin only.
    """

    # CORS preflight/response handler. Only trusts the local Temporal UI.
    async def cors_options(req: web.Request) -> web.Response:
        resp = web.Response()
        if req.headers.get(hdrs.ORIGIN) == "http://localhost:8233":
            resp.headers[hdrs.ACCESS_CONTROL_ALLOW_ORIGIN] = "http://localhost:8233"
            resp.headers[hdrs.ACCESS_CONTROL_ALLOW_METHODS] = "POST"
            resp.headers[hdrs.ACCESS_CONTROL_ALLOW_HEADERS] = "content-type,x-namespace"
        return resp

    # General purpose payloads-to-payloads handler shared by encode/decode
    async def apply(
        fn: Callable[[Iterable[Payload]], Awaitable[List[Payload]]], req: web.Request
    ) -> web.Response:
        # Read payloads as JSON
        assert req.content_type == "application/json"
        payloads = json_format.Parse(await req.read(), Payloads())

        # Apply the codec function (encode or decode)
        payloads = Payloads(payloads=await fn(payloads.payloads))

        # Apply CORS headers and return JSON
        resp = await cors_options(req)
        resp.content_type = "application/json"
        resp.text = json_format.MessageToJson(payloads)
        return resp

    # Build app
    codec = EncryptionCodec()
    app = web.Application()
    app.add_routes(
        [
            web.post("/encode", partial(apply, codec.encode)),
            web.post("/decode", partial(apply, codec.decode)),
            # Fix: register the CORS preflight handler for BOTH endpoints.
            # Previously only /decode had an OPTIONS route, so a browser
            # preflight request to /encode would fail with 405.
            web.options("/encode", cors_options),
            web.options("/decode", cors_options),
        ]
    )
    return app
async def main():
    """Run the encryption sample workflow using an encrypting data converter."""
    # Connect client
    client = await Client.connect(
        "localhost:7233",
        # Use the default converter, but change the codec so every payload is
        # transparently encrypted on the way out and decrypted on the way in.
        data_converter=dataclasses.replace(
            temporalio.converter.default(), payload_codec=EncryptionCodec()
        ),
    )

    # Run workflow. The workflow id was previously an f-string with no
    # placeholders (ruff F541); a plain string literal is equivalent.
    result = await client.execute_workflow(
        GreetingWorkflow.run,
        "Temporal",
        id="encryption-workflow-id",
        task_queue="encryption-task-queue",
    )
    print(f"Workflow result: {result}")
49 | interrupt_event.set() 50 | loop.run_until_complete(loop.shutdown_asyncgens()) 51 | -------------------------------------------------------------------------------- /gevent_async/README.md: -------------------------------------------------------------------------------- 1 | # Gevent Sample 2 | 3 | This sample shows how to run Temporal in an environment that gevent has patched. 4 | 5 | Gevent is built to patch Python libraries to attempt to seamlessly convert threaded code into coroutine-based code. 6 | However, it is well known within the gevent community that it does not work well with `asyncio`, which is the modern 7 | Python approach to coroutines. Temporal leverages `asyncio` which means by default it is incompatible with gevent. Users 8 | are encouraged to abandon gevent in favor of more modern approaches where they can but it is not always possible. 9 | 10 | This sample shows how to use a customized gevent executor to run `asyncio` Temporal clients, workers, activities, and 11 | workflows. 12 | 13 | For this sample, the optional `gevent` dependency group must be included. To include, run: 14 | 15 | uv sync --group gevent 16 | 17 | To run the sample, first see [README.md](../README.md) for prerequisites such as having a localhost Temporal server 18 | running. Then, run the following from this directory to start the worker: 19 | 20 | uv run worker.py 21 | 22 | This will start the worker. The worker has a workflow and two activities, one `asyncio` based and one gevent based. Now 23 | in another terminal, run the following from this directory to execute the workflow: 24 | 25 | uv run starter.py 26 | 27 | The workflow should run and complete with the hello result. Note on the worker terminal there will be logs of the 28 | workflow and activity executions. 
class GeventExecutor(threadpool.ThreadPoolExecutor):
    """Gevent thread pool whose ``submit`` returns a standard Python future.

    Gevent's returned futures do not map well to Python futures, so we must
    translate. We can't just use set_result/set_exception because done
    callbacks are not always called in gevent's case and it doesn't seem to
    support cancel, so we instead wrap the caller function.
    """

    def submit(
        self, fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs
    ) -> Future[T]:
        # Local import keeps this fix self-contained within the method
        from concurrent.futures import InvalidStateError

        python_fut: Future[T] = Future()

        @functools.wraps(fn)
        def wrapper(*w_args: P.args, **w_kwargs: P.kwargs) -> None:
            try:
                result = fn(*w_args, **w_kwargs)
                # Swallow only InvalidStateError, raised if the Python future
                # was cancelled. The original used a bare ``except:`` here,
                # which would also hide KeyboardInterrupt/SystemExit and any
                # real bug raised by set_result itself.
                try:
                    python_fut.set_result(result)
                except InvalidStateError:
                    pass
            except Exception as exc:
                # Same narrowing as above for the failure path
                try:
                    python_fut.set_exception(exc)
                except InvalidStateError:
                    pass

        # Submit our wrapper to gevent
        super().submit(wrapper, *args, **kwargs)
        # Return Python future to user
        return python_fut
def main():
    """Entry point: run async_main inside a single-thread gevent executor."""
    logging.basicConfig(level=logging.INFO)
    with GeventExecutor(max_workers=1) as runner:
        runner.submit(asyncio.run, async_main()).result()


async def async_main():
    """Start a local test server plus worker, then execute the sample workflow."""
    queue = "gevent_async-task-queue"
    logging.info("Starting local server")
    async with await WorkflowEnvironment.start_local() as env:
        logging.info("Starting worker")
        with GeventExecutor(max_workers=200) as task_pool:
            async with Worker(
                env.client,
                task_queue=queue,
                workflows=[workflow.GreetingWorkflow],
                activities=[
                    activity.compose_greeting_async,
                    activity.compose_greeting_sync,
                ],
                activity_executor=task_pool,
                workflow_task_executor=task_pool,
                max_concurrent_activities=100,
                max_concurrent_workflow_tasks=100,
            ):
                logging.info("Running workflow")
                result = await env.client.execute_workflow(
                    workflow.GreetingWorkflow.run,
                    "Temporal",
                    id="gevent_async-workflow-id",
                    task_queue=queue,
                )
                # CI sanity check: fail loudly if the greeting is wrong
                if result != "Hello, Temporal!":
                    raise RuntimeError(f"Unexpected result: {result}")
                logging.info(f"Workflow complete, result: {result}")
-------------------------------------------------------------------------------- /hello/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/hello/__init__.py -------------------------------------------------------------------------------- /hello/hello_activity_method.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from datetime import timedelta 3 | 4 | from temporalio import activity, workflow 5 | from temporalio.client import Client 6 | from temporalio.worker import Worker 7 | 8 | 9 | class MyDatabaseClient: 10 | async def run_database_update(self) -> None: 11 | print("Database update executed") 12 | 13 | 14 | class MyActivities: 15 | def __init__(self, db_client: MyDatabaseClient) -> None: 16 | self.db_client = db_client 17 | 18 | @activity.defn 19 | async def do_database_thing(self) -> None: 20 | await self.db_client.run_database_update() 21 | 22 | 23 | @workflow.defn 24 | class MyWorkflow: 25 | @workflow.run 26 | async def run(self) -> None: 27 | await workflow.execute_activity_method( 28 | MyActivities.do_database_thing, 29 | start_to_close_timeout=timedelta(seconds=10), 30 | ) 31 | 32 | 33 | async def main(): 34 | # Start client 35 | client = await Client.connect("localhost:7233") 36 | 37 | # Create our database client that can then be used in the activity 38 | db_client = MyDatabaseClient() 39 | # Instantiate our class containing state that can be referenced from 40 | # activity methods 41 | my_activities = MyActivities(db_client) 42 | 43 | # Run a worker for the workflow 44 | async with Worker( 45 | client, 46 | task_queue="hello-activity-method-task-queue", 47 | workflows=[MyWorkflow], 48 | activities=[my_activities.do_database_thing], 49 | ): 50 | 51 | # While the worker is running, use the client to run the workflow and 52 | # print out its result. 
@dataclass
class ComposeGreetingInput:
    """Input for the greeting-composition child workflow."""

    greeting: str
    name: str


@workflow.defn
class ComposeGreetingWorkflow:
    """Child workflow that formats a greeting from its input."""

    @workflow.run
    async def run(self, input: ComposeGreetingInput) -> str:
        greeting, name = input.greeting, input.name
        return f"{greeting}, {name}!"


@workflow.defn
class GreetingWorkflow:
    """Parent workflow that delegates greeting composition to a child workflow."""

    @workflow.run
    async def run(self, name: str) -> str:
        child_input = ComposeGreetingInput("Hello", name)
        return await workflow.execute_child_workflow(
            ComposeGreetingWorkflow.run,
            child_input,
            id="hello-child-workflow-workflow-child-id",
        )


async def main():
    """Run a worker and execute the parent workflow once, printing the result."""
    # Connect to the local Temporal server
    client = await Client.connect("localhost:7233")

    worker = Worker(
        client,
        task_queue="hello-child-workflow-task-queue",
        workflows=[GreetingWorkflow, ComposeGreetingWorkflow],
    )
    async with worker:
        # While the worker is running, use the client to run the workflow and
        # print out its result. Note, in many production setups, the client
        # would be in a completely separate process from the worker.
        greeting = await client.execute_workflow(
            GreetingWorkflow.run,
            "World",
            id="hello-child-workflow-workflow-id",
            task_queue="hello-child-workflow-task-queue",
        )
        print(f"Result: {greeting}")


if __name__ == "__main__":
    asyncio.run(main())
@dataclass
class ComposeGreetingInput:
    """Input for the compose_greeting activity."""

    greeting: str
    name: str


@activity.defn
async def compose_greeting(input: ComposeGreetingInput) -> str:
    """Return the formatted greeting string."""
    return f"{input.greeting}, {input.name}!"


@workflow.defn
class GreetingWorkflow:
    """Workflow run on a cron schedule; logs the composed greeting."""

    @workflow.run
    async def run(self, name: str) -> None:
        greeting = await workflow.execute_activity(
            compose_greeting,
            ComposeGreetingInput("Hello", name),
            start_to_close_timeout=timedelta(seconds=10),
        )
        workflow.logger.info("Result: %s", greeting)


async def main():
    """Start a worker, kick off the cron workflow, and keep polling forever."""
    client = await Client.connect("localhost:7233")

    worker = Worker(
        client,
        task_queue="hello-cron-task-queue",
        workflows=[GreetingWorkflow],
        activities=[compose_greeting],
    )
    async with worker:
        print("Running workflow once a minute")

        # Start (not execute) the workflow: a cron workflow never "completes",
        # so there is no result to wait on. Note, in many production setups,
        # the client would be in a completely separate process from the worker.
        await client.start_workflow(
            GreetingWorkflow.run,
            "World",
            id="hello-cron-workflow-id",
            task_queue="hello-cron-task-queue",
            cron_schedule="* * * * *",
        )

        # Block forever so the worker keeps serving cron runs
        await asyncio.Future()


if __name__ == "__main__":
    asyncio.run(main())
@workflow.defn
class GreetingWorkflow:
    """Workflow that exposes its current greeting via a query handler."""

    def __init__(self) -> None:
        self._greeting = ""

    @workflow.run
    async def run(self, name: str) -> None:
        # Set the greeting, wait a couple of seconds, then change it
        self._greeting = f"Hello, {name}!"
        await asyncio.sleep(2)
        self._greeting = f"Goodbye, {name}!"
        # It's ok to end the workflow here; queries keep working even after
        # workflow completion.

    @workflow.query
    def greeting(self) -> str:
        """Return the most recently set greeting."""
        return self._greeting


async def main():
    """Start a worker, run the workflow, and query it before and after it ends."""
    client = await Client.connect("localhost:7233")

    worker = Worker(
        client,
        task_queue="hello-query-task-queue",
        workflows=[GreetingWorkflow],
    )
    async with worker:
        # While the worker is running, use the client to start the workflow.
        # Note, in many production setups, the client would be in a completely
        # separate process from the worker.
        handle = await client.start_workflow(
            GreetingWorkflow.run,
            "World",
            id="hello-query-workflow-id",
            task_queue="hello-query-task-queue",
        )

        # Immediately query
        first = await handle.query(GreetingWorkflow.greeting)
        print(f"First greeting result: {first}")

        # Wait a few seconds and query again; this works even if the workflow
        # has already completed.
        await asyncio.sleep(3)
        second = await handle.query(GreetingWorkflow.greeting)
        print(f"Second greeting result: {second}")


if __name__ == "__main__":
    asyncio.run(main())
@workflow.defn
class GreetingWorkflow:
    """Workflow that blocks until an update handler marks it complete."""

    def __init__(self) -> None:
        # Flipped to True by the update handler to let run() finish
        self.is_complete = False

    @workflow.run
    async def run(self) -> str:
        await workflow.wait_condition(lambda: self.is_complete)
        return "Hello, World!"

    @workflow.update
    async def update_workflow_status(self) -> str:
        """Mark the workflow complete and acknowledge the update."""
        self.is_complete = True
        return "Workflow status updated"


async def main():
    """Start a worker, send the update, and print both results."""
    client = await Client.connect("localhost:7233")

    worker = Worker(
        client,
        task_queue="update-workflow-task-queue",
        workflows=[GreetingWorkflow],
    )
    async with worker:
        # While the worker is running, use the client to start the workflow.
        # Note, in many production setups, the client would be in a completely
        # separate process from the worker.
        handle = await client.start_workflow(
            GreetingWorkflow.run,
            id="hello-update-workflow-id",
            task_queue="update-workflow-task-queue",
        )

        # Perform the update for GreetingWorkflow
        update_result = await handle.execute_update(
            GreetingWorkflow.update_workflow_status
        )
        print(f"Update Result: {update_result}")

        # Get the result for GreetingWorkflow
        workflow_result = await handle.result()
        print(f"Workflow Result: {workflow_result}")


if __name__ == "__main__":
    asyncio.run(main())
Then, in another terminal, run the following to start the translation API server:
@app.post("/translate")
async def translate(phrase: str, language1: str, language2: str, language3: str):
    """Translate *phrase* into three target languages via a Temporal workflow."""
    target_languages = [language1, language2, language3]
    temporal = app.state.temporal_client
    try:
        translations = await temporal.execute_workflow(
            LangChainWorkflow.run,
            TranslateWorkflowParams(phrase, target_languages),
            id=f"langchain-translation-{uuid4()}",
            task_queue="langchain-task-queue",
        )
    except Exception as e:
        # Surface workflow failures to the HTTP caller as a 500
        raise HTTPException(status_code=500, detail=str(e))

    return {"translations": translations}
-------------------------------------------------------------------------------- /langchain/worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from activities import translate_phrase 4 | from langchain_interceptor import LangChainContextPropagationInterceptor 5 | from temporalio.client import Client 6 | from temporalio.worker import Worker 7 | from workflow import LangChainChildWorkflow, LangChainWorkflow 8 | 9 | interrupt_event = asyncio.Event() 10 | 11 | 12 | async def main(): 13 | client = await Client.connect("localhost:7233") 14 | worker = Worker( 15 | client, 16 | task_queue="langchain-task-queue", 17 | workflows=[LangChainWorkflow, LangChainChildWorkflow], 18 | activities=[translate_phrase], 19 | interceptors=[LangChainContextPropagationInterceptor()], 20 | ) 21 | 22 | print("\nWorker started, ctrl+c to exit\n") 23 | await worker.run() 24 | try: 25 | # Wait indefinitely until the interrupt event is set 26 | await interrupt_event.wait() 27 | finally: 28 | # The worker will be shutdown gracefully due to the async context manager 29 | print("\nShutting down the worker\n") 30 | 31 | 32 | if __name__ == "__main__": 33 | loop = asyncio.new_event_loop() 34 | asyncio.set_event_loop(loop) 35 | try: 36 | loop.run_until_complete(main()) 37 | except KeyboardInterrupt: 38 | print("\nInterrupt received, shutting down...\n") 39 | interrupt_event.set() 40 | loop.run_until_complete(loop.shutdown_asyncgens()) 41 | -------------------------------------------------------------------------------- /langchain/workflow.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from dataclasses import dataclass 3 | from datetime import timedelta 4 | from typing import List 5 | 6 | from temporalio import workflow 7 | 8 | with workflow.unsafe.imports_passed_through(): 9 | from activities import TranslateParams, translate_phrase 10 | 11 | 12 | @workflow.defn 13 | class 
LangChainChildWorkflow: 14 | @workflow.run 15 | async def run(self, params: TranslateParams) -> str: 16 | return await workflow.execute_activity( 17 | translate_phrase, 18 | params, 19 | schedule_to_close_timeout=timedelta(seconds=30), 20 | ) 21 | 22 | 23 | @dataclass 24 | class TranslateWorkflowParams: 25 | phrase: str 26 | languages: List[str] 27 | 28 | 29 | @workflow.defn 30 | class LangChainWorkflow: 31 | @workflow.run 32 | async def run(self, params: TranslateWorkflowParams) -> dict: 33 | result1, result2, result3 = await asyncio.gather( 34 | workflow.execute_activity( 35 | translate_phrase, 36 | TranslateParams(params.phrase, params.languages[0]), 37 | schedule_to_close_timeout=timedelta(seconds=30), 38 | ), 39 | workflow.execute_activity( 40 | translate_phrase, 41 | TranslateParams(params.phrase, params.languages[1]), 42 | schedule_to_close_timeout=timedelta(seconds=30), 43 | ), 44 | workflow.execute_child_workflow( 45 | LangChainChildWorkflow.run, 46 | TranslateParams(params.phrase, params.languages[2]), 47 | ), 48 | ) 49 | return { 50 | params.languages[0]: result1, 51 | params.languages[1]: result2, 52 | params.languages[2]: result3, 53 | } 54 | -------------------------------------------------------------------------------- /message_passing/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/message_passing/__init__.py -------------------------------------------------------------------------------- /message_passing/introduction/README.md: -------------------------------------------------------------------------------- 1 | # Introduction to message-passing 2 | 3 | This sample provides an introduction to using Query, Signal, and Update. 4 | 5 | See https://docs.temporal.io/develop/python/message-passing. 6 | 7 | To run, first see the main [README.md](../../README.md) for prerequisites. 
import asyncio
from typing import Optional

from temporalio import activity

from message_passing.introduction import Language


@activity.defn
async def call_greeting_service(to_language: Language) -> Optional[str]:
    """Simulate a call to a remote greeting service.

    The remote greeting service supports the full range of languages;
    None is returned for a language it does not recognize.
    """
    # Simulate the network round-trip first, then answer from a canned
    # catalogue standing in for the remote service's data.
    await asyncio.sleep(0.2)
    catalogue = {
        Language.ARABIC: "مرحبا بالعالم",
        Language.CHINESE: "你好,世界",
        Language.ENGLISH: "Hello, world",
        Language.FRENCH: "Bonjour, monde",
        Language.HINDI: "नमस्ते दुनिया",
        Language.PORTUGUESE: "Olá mundo",
        Language.SPANISH: "Hola mundo",
    }
    return catalogue.get(to_language)
await update_handle.result() 43 | current_language = await wf_handle.query(GreetingWorkflow.get_language) 44 | print(f"language changed: {previous_language.name} -> {current_language.name}") 45 | 46 | # 👉 Send a Signal 47 | await wf_handle.signal(GreetingWorkflow.approve, ApproveInput(name="")) 48 | print(await wf_handle.result()) 49 | 50 | 51 | if __name__ == "__main__": 52 | asyncio.run(main()) 53 | -------------------------------------------------------------------------------- /message_passing/introduction/worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | from temporalio.client import Client 5 | from temporalio.worker import Worker 6 | 7 | from message_passing.introduction import TASK_QUEUE 8 | from message_passing.introduction.activities import call_greeting_service 9 | from message_passing.introduction.workflows import GreetingWorkflow 10 | 11 | interrupt_event = asyncio.Event() 12 | 13 | 14 | async def main(): 15 | logging.basicConfig(level=logging.INFO) 16 | 17 | client = await Client.connect("localhost:7233") 18 | 19 | async with Worker( 20 | client, 21 | task_queue=TASK_QUEUE, 22 | workflows=[GreetingWorkflow], 23 | activities=[call_greeting_service], 24 | ): 25 | logging.info("Worker started, ctrl+c to exit") 26 | await interrupt_event.wait() 27 | logging.info("Shutting down") 28 | 29 | 30 | if __name__ == "__main__": 31 | loop = asyncio.new_event_loop() 32 | try: 33 | loop.run_until_complete(main()) 34 | except KeyboardInterrupt: 35 | interrupt_event.set() 36 | loop.run_until_complete(loop.shutdown_asyncgens()) 37 | -------------------------------------------------------------------------------- /message_passing/safe_message_handlers/README.md: -------------------------------------------------------------------------------- 1 | # Atomic message handlers 2 | 3 | This sample shows off important techniques for handling signals and updates, aka messages. 
In particular, it illustrates how message handlers can interleave or not be completed before the workflow completes, and how you can manage that. 4 | 5 | * Here, using workflow.wait_condition, signal and update handlers will only operate when the workflow is within a certain state--between cluster_started and cluster_shutdown. 6 | * Message handlers can block and their actions can be interleaved with one another and with the main workflow. This can easily cause bugs, so you can use a lock to protect shared state from interleaved access. 7 | * An "Entity" workflow, i.e. a long-lived workflow, periodically "continues as new". It must do this to prevent its history from growing too large, and it passes its state to the next workflow. You can check `workflow.info().is_continue_as_new_suggested()` to see when it's time. 8 | * Most people want their message handlers to finish before the workflow run completes or continues as new. Use `await workflow.wait_condition(lambda: workflow.all_handlers_finished())` to achieve this. 9 | * Message handlers can be made idempotent. See update `ClusterManager.assign_nodes_to_job`. 10 | 11 | To run, first see [README.md](../../README.md) for prerequisites. 12 | 13 | Then, run the following from this directory to run the worker: 14 | 15 | uv run worker.py 16 | 17 | Then, in another terminal, run the following to execute the workflow: 18 | 19 | uv run starter.py 20 | 21 | This will start a worker to run your workflow and activities, then start a ClusterManagerWorkflow and put it through its paces.
22 | -------------------------------------------------------------------------------- /message_passing/safe_message_handlers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/message_passing/safe_message_handlers/__init__.py -------------------------------------------------------------------------------- /message_passing/safe_message_handlers/activities.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from dataclasses import dataclass 3 | from typing import List, Set 4 | 5 | from temporalio import activity 6 | 7 | 8 | @dataclass 9 | class AssignNodesToJobInput: 10 | nodes: List[str] 11 | job_name: str 12 | 13 | 14 | @dataclass 15 | class ClusterState: 16 | node_ids: List[str] 17 | 18 | 19 | @activity.defn 20 | async def start_cluster() -> ClusterState: 21 | return ClusterState(node_ids=[f"node-{i}" for i in range(25)]) 22 | 23 | 24 | @activity.defn 25 | async def assign_nodes_to_job(input: AssignNodesToJobInput) -> None: 26 | print(f"Assigning nodes {input.nodes} to job {input.job_name}") 27 | await asyncio.sleep(0.1) 28 | 29 | 30 | @dataclass 31 | class UnassignNodesForJobInput: 32 | nodes: List[str] 33 | job_name: str 34 | 35 | 36 | @activity.defn 37 | async def unassign_nodes_for_job(input: UnassignNodesForJobInput) -> None: 38 | print(f"Deallocating nodes {input.nodes} from job {input.job_name}") 39 | await asyncio.sleep(0.1) 40 | 41 | 42 | @dataclass 43 | class FindBadNodesInput: 44 | nodes_to_check: Set[str] 45 | 46 | 47 | @activity.defn 48 | async def find_bad_nodes(input: FindBadNodesInput) -> Set[str]: 49 | await asyncio.sleep(0.1) 50 | bad_nodes = set([id for id in input.nodes_to_check if hash(id) % 5 == 0]) 51 | if bad_nodes: 52 | print(f"Found bad nodes: {bad_nodes}") 53 | else: 54 | print("No new bad nodes found.") 55 | return bad_nodes 56 | 
-------------------------------------------------------------------------------- /message_passing/safe_message_handlers/worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | from temporalio.client import Client 5 | from temporalio.worker import Worker 6 | 7 | from message_passing.safe_message_handlers.workflow import ( 8 | ClusterManagerWorkflow, 9 | assign_nodes_to_job, 10 | find_bad_nodes, 11 | start_cluster, 12 | unassign_nodes_for_job, 13 | ) 14 | 15 | interrupt_event = asyncio.Event() 16 | 17 | 18 | async def main(): 19 | client = await Client.connect("localhost:7233") 20 | 21 | async with Worker( 22 | client, 23 | task_queue="safe-message-handlers-task-queue", 24 | workflows=[ClusterManagerWorkflow], 25 | activities=[ 26 | assign_nodes_to_job, 27 | unassign_nodes_for_job, 28 | find_bad_nodes, 29 | start_cluster, 30 | ], 31 | ): 32 | logging.info("ClusterManagerWorkflow worker started, ctrl+c to exit") 33 | await interrupt_event.wait() 34 | logging.info("Shutting down") 35 | 36 | 37 | if __name__ == "__main__": 38 | logging.basicConfig(level=logging.INFO) 39 | loop = asyncio.new_event_loop() 40 | try: 41 | loop.run_until_complete(main()) 42 | except KeyboardInterrupt: 43 | interrupt_event.set() 44 | loop.run_until_complete(loop.shutdown_asyncgens()) 45 | -------------------------------------------------------------------------------- /message_passing/update_with_start/lazy_initialization/README.md: -------------------------------------------------------------------------------- 1 | # Update With Start: Lazy init 2 | 3 | This sample illustrates the use of update-with-start to send Updates to a Workflow, starting the Workflow if 4 | it is not running yet ("lazy init"). The Workflow represents a Shopping Cart in an e-commerce application, and 5 | update-with-start is used to add items to the cart, receiving back the updated cart subtotal. 
6 | 7 | To run, first see the main [README.md](../../../README.md) for prerequisites. 8 | 9 | Then run the following from this directory: 10 | 11 | uv run worker.py 12 | 13 | Then, in another terminal: 14 | 15 | uv run starter.py 16 | 17 | This will start a worker to run your workflow and activities, then simulate a backend application receiving 18 | requests to add items to a shopping cart, before finalizing the order. 19 | -------------------------------------------------------------------------------- /message_passing/update_with_start/lazy_initialization/__init__.py: -------------------------------------------------------------------------------- 1 | TASK_QUEUE = "update-with-start-lazy-initialization" 2 | -------------------------------------------------------------------------------- /message_passing/update_with_start/lazy_initialization/activities.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from dataclasses import dataclass 3 | from typing import Optional 4 | 5 | from temporalio import activity 6 | 7 | 8 | @dataclass 9 | class ShoppingCartItem: 10 | sku: str 11 | quantity: int 12 | 13 | 14 | @activity.defn 15 | async def get_price(item: ShoppingCartItem) -> Optional[int]: 16 | await asyncio.sleep(0.1) 17 | price = None if item.sku == "sku-456" else 599 18 | if price is None: 19 | return None 20 | return price * item.quantity 21 | -------------------------------------------------------------------------------- /message_passing/update_with_start/lazy_initialization/worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | from temporalio.client import Client 5 | from temporalio.worker import Worker 6 | 7 | from message_passing.update_with_start.lazy_initialization import TASK_QUEUE, workflows 8 | from message_passing.update_with_start.lazy_initialization.activities import get_price 9 | 10 | interrupt_event = 
from dataclasses import dataclass
from datetime import timedelta
from typing import List, Tuple

from temporalio import workflow
from temporalio.exceptions import ApplicationError

with workflow.unsafe.imports_passed_through():
    from message_passing.update_with_start.lazy_initialization.activities import (
        ShoppingCartItem,
        get_price,
    )


@dataclass
class FinalizedOrder:
    """Result of a submitted cart: its id, priced items, and total."""

    id: str
    items: List[Tuple[ShoppingCartItem, int]]
    total: int


@workflow.defn
class ShoppingCartWorkflow:
    """Shopping cart driven by update-with-start ("lazy init").

    Items are added via the `add_item` update; `checkout` submits the
    order, after which the run finalizes once all handlers are done.
    """

    def __init__(self) -> None:
        # (item, line total) pairs accumulated by add_item.
        self.items: List[Tuple[ShoppingCartItem, int]] = []
        self.order_submitted = False

    @workflow.run
    async def run(self) -> FinalizedOrder:
        """Wait for checkout and quiescent handlers, then finalize."""
        await workflow.wait_condition(
            lambda: workflow.all_handlers_finished() and self.order_submitted
        )
        cart_total = sum(line_total for _, line_total in self.items)
        return FinalizedOrder(
            id=workflow.info().workflow_id,
            items=self.items,
            total=cart_total,
        )

    @workflow.update
    async def add_item(self, item: ShoppingCartItem) -> int:
        """Price and add an item; return the updated cart subtotal.

        Raises ApplicationError("ItemUnavailableError") when the pricing
        activity reports the item unavailable (returns None).
        """
        line_total = await workflow.execute_activity(
            get_price, item, start_to_close_timeout=timedelta(seconds=10)
        )
        if line_total is None:
            raise ApplicationError(
                f"Item unavailable: {item}",
                type="ItemUnavailableError",
            )
        self.items.append((item, line_total))
        return sum(total for _, total in self.items)

    @add_item.validator
    def validate_add_item(self, item: ShoppingCartItem) -> None:
        """Reject additions once the order has been submitted."""
        if self.order_submitted:
            raise ApplicationError("Order already submitted")

    @workflow.signal
    def checkout(self):
        """Mark the order as submitted so the run can finalize."""
        self.order_submitted = True
import asyncio
import logging

from temporalio.client import Client
from temporalio.worker import Worker

from message_passing.waiting_for_handlers import TASK_QUEUE
from message_passing.waiting_for_handlers.activities import (
    activity_executed_by_update_handler,
)
from message_passing.waiting_for_handlers.workflows import WaitingForHandlersWorkflow

# Set on ctrl+c so main() can unwind and stop the worker.
interrupt_event = asyncio.Event()


async def main():
    """Run the waiting-for-handlers sample worker until interrupted."""
    logging.basicConfig(level=logging.INFO)

    temporal_client = await Client.connect("localhost:7233")

    worker = Worker(
        temporal_client,
        task_queue=TASK_QUEUE,
        workflows=[WaitingForHandlersWorkflow],
        activities=[activity_executed_by_update_handler],
    )
    async with worker:
        logging.info("Worker started, ctrl+c to exit")
        await interrupt_event.wait()
        logging.info("Shutting down")


if __name__ == "__main__":
    event_loop = asyncio.new_event_loop()
    try:
        event_loop.run_until_complete(main())
    except KeyboardInterrupt:
        interrupt_event.set()
        event_loop.run_until_complete(event_loop.shutdown_asyncgens())
import asyncio
import logging

from temporalio.client import Client
from temporalio.worker import Worker

from message_passing.waiting_for_handlers_and_compensation import TASK_QUEUE
from message_passing.waiting_for_handlers_and_compensation.activities import (
    activity_executed_by_update_handler,
    activity_executed_by_update_handler_to_perform_compensation,
    activity_executed_to_perform_workflow_compensation,
)
from message_passing.waiting_for_handlers_and_compensation.workflows import (
    WaitingForHandlersAndCompensationWorkflow,
)

# Set on ctrl+c so main() can unwind and stop the worker.
interrupt_event = asyncio.Event()


async def main():
    """Run the handlers-and-compensation sample worker until interrupted."""
    logging.basicConfig(level=logging.INFO)

    temporal_client = await Client.connect("localhost:7233")

    worker = Worker(
        temporal_client,
        task_queue=TASK_QUEUE,
        workflows=[WaitingForHandlersAndCompensationWorkflow],
        activities=[
            activity_executed_by_update_handler,
            activity_executed_by_update_handler_to_perform_compensation,
            activity_executed_to_perform_workflow_compensation,
        ],
    )
    async with worker:
        logging.info("Worker started, ctrl+c to exit")
        await interrupt_event.wait()
        logging.info("Shutting down")


if __name__ == "__main__":
    event_loop = asyncio.new_event_loop()
    try:
        event_loop.run_until_complete(main())
    except KeyboardInterrupt:
        interrupt_event.set()
        event_loop.run_until_complete(event_loop.shutdown_asyncgens())
/open_telemetry/README.md: -------------------------------------------------------------------------------- 1 | # OpenTelemetry Sample 2 | 3 | This sample shows how to configure OpenTelemetry to capture workflow traces and SDK metrics. 4 | 5 | For this sample, the optional `open_telemetry` dependency group must be included. To include, run: 6 | 7 | uv sync --group open-telemetry 8 | 9 | To run, first see [README.md](../README.md) for prerequisites. Then run the following to start an [Aspire](https://hub.docker.com/r/microsoft/dotnet-aspire-dashboard/) OTEL collector: 10 | 11 | docker compose up 12 | 13 | Now, from this directory, start the worker in its own terminal: 14 | 15 | uv run worker.py 16 | 17 | Then, in another terminal, run the following to execute the workflow: 18 | 19 | uv run starter.py 20 | 21 | The workflow should complete with the hello result. 22 | 23 | Now view the Aspire UI at http://localhost:18888/. 24 | 25 | To view metrics sent describing the worker and the workflow that was executed, select `Metrics` on the left and under "Select a resource" select "temporal-core-sdk". It may look like this: 26 | 27 | ![Aspire metrics screenshot](aspire-metrics-screenshot.png) 28 | 29 | 30 | To view workflow spans, select `Traces` on the left and under "Select a resource" select "temporal-core-sdk". It may look like this: 31 | 32 | ![Aspire traces screenshot](aspire-traces-screenshot.png) 33 | 34 | Note, in-workflow spans do not have a time associated with them. This is intentional, due to replay. In 35 | OpenTelemetry, only the process that started the span may end it. But in Temporal a span may cross workers/processes. 36 | Therefore we intentionally start-then-end in-workflow spans immediately. So while the start time and hierarchy are 37 | accurate, the duration is not.
import asyncio

from temporalio.client import Client
from temporalio.contrib.opentelemetry import TracingInterceptor

from open_telemetry.worker import GreetingWorkflow, init_runtime_with_telemetry


async def main():
    """Start GreetingWorkflow with OTEL tracing enabled and print its result."""
    runtime = init_runtime_with_telemetry()

    # Connect client
    client = await Client.connect(
        "localhost:7233",
        # Use OpenTelemetry interceptor so client calls emit spans
        interceptors=[TracingInterceptor()],
        runtime=runtime,
    )

    # Run workflow. (Fixed: the id was an f-string with no placeholders.)
    result = await client.execute_workflow(
        GreetingWorkflow.run,
        "Temporal",
        id="open_telemetry-workflow-id",
        task_queue="open_telemetry-task-queue",
    )
    print(f"Workflow result: {result}")


if __name__ == "__main__":
    asyncio.run(main())
required") 18 | 19 | # Connect client 20 | client = await Client.connect("localhost:7233") 21 | 22 | if args.start_workflow: 23 | handle = await client.start_workflow( 24 | MyWorkflow.run, id=args.start_workflow, task_queue="patching-task-queue" 25 | ) 26 | print(f"Started workflow with ID {handle.id} and run ID {handle.result_run_id}") 27 | if args.query_workflow: 28 | handle = client.get_workflow_handle_for(MyWorkflow.run, args.query_workflow) 29 | result = await handle.query(MyWorkflow.result) 30 | print(f"Query result for ID {handle.id}: {result}") 31 | 32 | 33 | if __name__ == "__main__": 34 | asyncio.run(main()) 35 | -------------------------------------------------------------------------------- /patching/worker.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import asyncio 3 | 4 | from temporalio.client import Client 5 | from temporalio.worker import Worker 6 | 7 | from patching.activities import post_patch_activity, pre_patch_activity 8 | 9 | interrupt_event = asyncio.Event() 10 | 11 | 12 | async def main(): 13 | # Import which workflow based on CLI arg 14 | parser = argparse.ArgumentParser(description="Run worker") 15 | parser.add_argument( 16 | "--workflow", 17 | help="Which workflow. 
Can be 'initial', 'patched', 'patch-deprecated', or 'patch-complete'", 18 | required=True, 19 | ) 20 | args = parser.parse_args() 21 | if args.workflow == "initial": 22 | from patching.workflow_1_initial import MyWorkflow 23 | elif args.workflow == "patched": 24 | from patching.workflow_2_patched import MyWorkflow # type: ignore 25 | elif args.workflow == "patch-deprecated": 26 | from patching.workflow_3_patch_deprecated import MyWorkflow # type: ignore 27 | elif args.workflow == "patch-complete": 28 | from patching.workflow_4_patch_complete import MyWorkflow # type: ignore 29 | else: 30 | raise RuntimeError("Unrecognized workflow") 31 | 32 | # Connect client 33 | client = await Client.connect("localhost:7233") 34 | 35 | # Run a worker for the workflow 36 | async with Worker( 37 | client, 38 | task_queue="patching-task-queue", 39 | workflows=[MyWorkflow], 40 | activities=[pre_patch_activity, post_patch_activity], 41 | ): 42 | # Wait until interrupted 43 | print("Worker started") 44 | await interrupt_event.wait() 45 | print("Shutting down") 46 | 47 | 48 | if __name__ == "__main__": 49 | loop = asyncio.new_event_loop() 50 | try: 51 | loop.run_until_complete(main()) 52 | except KeyboardInterrupt: 53 | interrupt_event.set() 54 | loop.run_until_complete(loop.shutdown_asyncgens()) 55 | -------------------------------------------------------------------------------- /patching/workflow_1_initial.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | 3 | from temporalio import workflow 4 | 5 | with workflow.unsafe.imports_passed_through(): 6 | from patching.activities import pre_patch_activity 7 | 8 | 9 | @workflow.defn 10 | class MyWorkflow: 11 | @workflow.run 12 | async def run(self) -> None: 13 | self._result = await workflow.execute_activity( 14 | pre_patch_activity, 15 | schedule_to_close_timeout=timedelta(minutes=5), 16 | ) 17 | 18 | @workflow.query 19 | def result(self) -> str: 20 | return 
self._result 21 | -------------------------------------------------------------------------------- /patching/workflow_2_patched.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | 3 | from temporalio import workflow 4 | 5 | with workflow.unsafe.imports_passed_through(): 6 | from patching.activities import post_patch_activity, pre_patch_activity 7 | 8 | 9 | @workflow.defn 10 | class MyWorkflow: 11 | @workflow.run 12 | async def run(self) -> None: 13 | if workflow.patched("my-patch"): 14 | self._result = await workflow.execute_activity( 15 | post_patch_activity, 16 | schedule_to_close_timeout=timedelta(minutes=5), 17 | ) 18 | else: 19 | self._result = await workflow.execute_activity( 20 | pre_patch_activity, 21 | schedule_to_close_timeout=timedelta(minutes=5), 22 | ) 23 | 24 | @workflow.query 25 | def result(self) -> str: 26 | return self._result 27 | -------------------------------------------------------------------------------- /patching/workflow_3_patch_deprecated.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | 3 | from temporalio import workflow 4 | 5 | with workflow.unsafe.imports_passed_through(): 6 | from patching.activities import post_patch_activity 7 | 8 | 9 | @workflow.defn 10 | class MyWorkflow: 11 | @workflow.run 12 | async def run(self) -> None: 13 | workflow.deprecate_patch("my-patch") 14 | self._result = await workflow.execute_activity( 15 | post_patch_activity, 16 | schedule_to_close_timeout=timedelta(minutes=5), 17 | ) 18 | 19 | @workflow.query 20 | def result(self) -> str: 21 | return self._result 22 | -------------------------------------------------------------------------------- /patching/workflow_4_patch_complete.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | 3 | from temporalio import workflow 4 | 5 | with 
workflow.unsafe.imports_passed_through(): 6 | from patching.activities import post_patch_activity 7 | 8 | 9 | @workflow.defn 10 | class MyWorkflow: 11 | @workflow.run 12 | async def run(self) -> None: 13 | self._result = await workflow.execute_activity( 14 | post_patch_activity, 15 | schedule_to_close_timeout=timedelta(minutes=5), 16 | ) 17 | 18 | @workflow.query 19 | def result(self) -> str: 20 | return self._result 21 | -------------------------------------------------------------------------------- /polling/README.md: -------------------------------------------------------------------------------- 1 | # Polling 2 | 3 | These samples show three different best practices for polling. 4 | 5 | 1. [Frequently Polling Activity](./frequent/README.md) 6 | 2. [Infrequently Polling Activity](./infrequent/README.md) 7 | 3. [Periodic Polling of a Sequence of Activities](./periodic_sequence/README.md) 8 | 9 | The samples are based on [this](https://community.temporal.io/t/what-is-the-best-practice-for-a-polling-activity/328/2) community forum thread. 10 | -------------------------------------------------------------------------------- /polling/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/polling/__init__.py -------------------------------------------------------------------------------- /polling/frequent/README.md: -------------------------------------------------------------------------------- 1 | # Frequently Polling Activity 2 | 3 | This sample shows how we can implement frequent polling (1 second or faster) inside our Activity. The implementation is a loop that polls our service and then sleeps for the poll interval (1 second in the sample). 4 | 5 | To ensure that polling Activity is restarted in a timely manner, we make sure that it heartbeats on every iteration. 
Note that heartbeating only works if we set the `heartbeat_timeout` to a shorter value than the Activity `start_to_close_timeout` timeout. 6 | 7 | To run, first see [README.md](../../README.md) for prerequisites. 8 | 9 | Then, run the following from this directory to run the sample: 10 | 11 | uv run run_worker.py 12 | 13 | Then, in another terminal, run the following to execute the workflow: 14 | 15 | uv run run_frequent.py 16 | 17 | The Workflow will continue to poll the service and heartbeat on every iteration until it succeeds. 18 | 19 | Note that with frequent polling, the Activity may execute for a long time, and it may be beneficial to set a Timeout on the Activity to avoid long-running Activities that are not heartbeating. 20 | 21 | If the polling interval needs to be changed during runtime, the Activity needs to be canceled and a new instance with the updated arguments needs to be started. 22 | -------------------------------------------------------------------------------- /polling/frequent/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/polling/frequent/__init__.py -------------------------------------------------------------------------------- /polling/frequent/activities.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio import activity 4 | 5 | from polling.test_service import ComposeGreetingInput, get_service_result 6 | 7 | 8 | @activity.defn 9 | async def compose_greeting(input: ComposeGreetingInput) -> str: 10 | while True: 11 | try: 12 | try: 13 | result = await get_service_result(input) 14 | activity.logger.info(f"Exiting activity ${result}") 15 | return result 16 | except Exception: 17 | # swallow exception since service is down 18 | activity.logger.debug("Failed, trying again shortly", exc_info=True) 19 | 20 | 
activity.heartbeat("Invoking activity") 21 | await asyncio.sleep(1) 22 | except asyncio.CancelledError: 23 | # activity was either cancelled or workflow was completed or worker shut down 24 | # if you need to clean up you can catch this. 25 | # Here we are just reraising the exception 26 | raise 27 | -------------------------------------------------------------------------------- /polling/frequent/run_frequent.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | 5 | from polling.frequent.workflows import GreetingWorkflow 6 | 7 | 8 | async def main(): 9 | client = await Client.connect("localhost:7233") 10 | result = await client.execute_workflow( 11 | GreetingWorkflow.run, 12 | "World", 13 | id="frequent-activity-retry", 14 | task_queue="frequent-activity-retry-task-queue", 15 | ) 16 | print(f"Result: {result}") 17 | 18 | 19 | if __name__ == "__main__": 20 | asyncio.run(main()) 21 | -------------------------------------------------------------------------------- /polling/frequent/run_worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | from temporalio.worker import Worker 5 | 6 | from polling.frequent.activities import compose_greeting 7 | from polling.frequent.workflows import GreetingWorkflow 8 | 9 | 10 | async def main(): 11 | client = await Client.connect("localhost:7233") 12 | 13 | worker = Worker( 14 | client, 15 | task_queue="frequent-activity-retry-task-queue", 16 | workflows=[GreetingWorkflow], 17 | activities=[compose_greeting], 18 | ) 19 | await worker.run() 20 | 21 | 22 | if __name__ == "__main__": 23 | asyncio.run(main()) 24 | -------------------------------------------------------------------------------- /polling/frequent/workflows.py: -------------------------------------------------------------------------------- 1 | from datetime import 
timedelta 2 | 3 | from temporalio import workflow 4 | 5 | with workflow.unsafe.imports_passed_through(): 6 | from polling.frequent.activities import ComposeGreetingInput, compose_greeting 7 | 8 | 9 | @workflow.defn 10 | class GreetingWorkflow: 11 | @workflow.run 12 | async def run(self, name: str) -> str: 13 | return await workflow.execute_activity( 14 | compose_greeting, 15 | ComposeGreetingInput("Hello", name), 16 | start_to_close_timeout=timedelta(seconds=60), 17 | heartbeat_timeout=timedelta(seconds=2), 18 | ) 19 | -------------------------------------------------------------------------------- /polling/infrequent/README.md: -------------------------------------------------------------------------------- 1 | # Infrequently Polling Activity 2 | 3 | This sample shows how to use Activity retries for infrequent polling of a third-party service (for example via REST). This method can be used for infrequent polls of one minute or slower. 4 | 5 | Activity retries are utilized for this option, setting the following Retry options: 6 | 7 | - `backoff_coefficient`: to 1 8 | - `initial_interval`: to the polling interval (in this sample set to 60 seconds) 9 | 10 | This will enable the Activity to be retried exactly on the set interval. 11 | 12 | To run, first see [README.md](../../README.md) for prerequisites. 13 | 14 | Then, run the following from this directory to run the sample: 15 | 16 | uv run run_worker.py 17 | 18 | Then, in another terminal, run the following to execute the workflow: 19 | 20 | uv run run_infrequent.py 21 | 22 | 23 | Since the test service simulates being _down_ for four polling attempts and then returns _OK_ on the fifth poll attempt, the Workflow will perform four Activity retries with a 60-second poll interval, and then return the service result on the successful fifth attempt. 24 | 25 | Note that individual Activity retries are not recorded in Workflow History, so this approach can poll for a very long time without affecting the history size. 
26 | -------------------------------------------------------------------------------- /polling/infrequent/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/polling/infrequent/__init__.py -------------------------------------------------------------------------------- /polling/infrequent/activities.py: -------------------------------------------------------------------------------- 1 | from temporalio import activity 2 | 3 | from polling.test_service import ComposeGreetingInput, get_service_result 4 | 5 | 6 | @activity.defn 7 | async def compose_greeting(input: ComposeGreetingInput) -> str: 8 | # If this raises an exception because it's not done yet, the activity will 9 | # continually be scheduled for retry 10 | return await get_service_result(input) 11 | -------------------------------------------------------------------------------- /polling/infrequent/run_infrequent.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | 5 | from polling.infrequent.workflows import GreetingWorkflow 6 | 7 | 8 | async def main(): 9 | client = await Client.connect("localhost:7233") 10 | result = await client.execute_workflow( 11 | GreetingWorkflow.run, 12 | "World", 13 | id="infrequent-activity-retry", 14 | task_queue="infrequent-activity-retry-task-queue", 15 | ) 16 | print(f"Result: {result}") 17 | 18 | 19 | if __name__ == "__main__": 20 | asyncio.run(main()) 21 | -------------------------------------------------------------------------------- /polling/infrequent/run_worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | from temporalio.worker import Worker 5 | 6 | from polling.infrequent.activities import compose_greeting 7 | from 
polling.infrequent.workflows import GreetingWorkflow 8 | 9 | 10 | async def main(): 11 | client = await Client.connect("localhost:7233") 12 | 13 | worker = Worker( 14 | client, 15 | task_queue="infrequent-activity-retry-task-queue", 16 | workflows=[GreetingWorkflow], 17 | activities=[compose_greeting], 18 | ) 19 | await worker.run() 20 | 21 | 22 | if __name__ == "__main__": 23 | asyncio.run(main()) 24 | -------------------------------------------------------------------------------- /polling/infrequent/workflows.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | 3 | from temporalio import workflow 4 | from temporalio.common import RetryPolicy 5 | 6 | with workflow.unsafe.imports_passed_through(): 7 | from polling.infrequent.activities import ComposeGreetingInput, compose_greeting 8 | 9 | 10 | @workflow.defn 11 | class GreetingWorkflow: 12 | @workflow.run 13 | async def run(self, name: str) -> str: 14 | return await workflow.execute_activity( 15 | compose_greeting, 16 | ComposeGreetingInput("Hello", name), 17 | start_to_close_timeout=timedelta(seconds=2), 18 | retry_policy=RetryPolicy( 19 | backoff_coefficient=1.0, 20 | initial_interval=timedelta(seconds=60), 21 | ), 22 | ) 23 | -------------------------------------------------------------------------------- /polling/periodic_sequence/README.md: -------------------------------------------------------------------------------- 1 | # Periodic Polling of a Sequence of Activities 2 | 3 | This sample demonstrates how to use a Child Workflow for periodic Activity polling. 4 | 5 | This is a rare scenario where polling requires execution of a Sequence of Activities, or Activity arguments need to change between polling retries. For this case we use a Child Workflow to call polling activities a set number of times in a loop and then periodically call Continue-As-New. 6 | 7 | To run, first see [README.md](../../README.md) for prerequisites. 
8 | 9 | Then, run the following from this directory to run the sample: 10 | 11 | uv run run_worker.py 12 | 13 | Then, in another terminal, run the following to execute the workflow: 14 | 15 | uv run run_periodic.py 16 | 17 | 18 | This will start a Workflow and Child Workflow to periodically poll an Activity. 19 | The Parent Workflow is not aware about the Child Workflow calling Continue-As-New, and it gets notified when it completes (or fails). -------------------------------------------------------------------------------- /polling/periodic_sequence/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/polling/periodic_sequence/__init__.py -------------------------------------------------------------------------------- /polling/periodic_sequence/activities.py: -------------------------------------------------------------------------------- 1 | from typing import Any, NoReturn 2 | 3 | from temporalio import activity 4 | 5 | 6 | @activity.defn 7 | async def compose_greeting(input: Any) -> NoReturn: 8 | raise RuntimeError("Service is down") 9 | -------------------------------------------------------------------------------- /polling/periodic_sequence/run_periodic.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | 5 | from polling.periodic_sequence.workflows import GreetingWorkflow 6 | 7 | 8 | async def main(): 9 | client = await Client.connect("localhost:7233") 10 | result = await client.execute_workflow( 11 | GreetingWorkflow.run, 12 | "World", 13 | id="periodic-child-workflow-retry", 14 | task_queue="periodic-retry-task-queue", 15 | ) 16 | print(f"Result: {result}") 17 | 18 | 19 | if __name__ == "__main__": 20 | asyncio.run(main()) 21 | -------------------------------------------------------------------------------- 
/polling/periodic_sequence/run_worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | from temporalio.worker import Worker 5 | 6 | from polling.periodic_sequence.activities import compose_greeting 7 | from polling.periodic_sequence.workflows import ChildWorkflow, GreetingWorkflow 8 | 9 | 10 | async def main(): 11 | client = await Client.connect("localhost:7233") 12 | 13 | worker = Worker( 14 | client, 15 | task_queue="periodic-retry-task-queue", 16 | workflows=[GreetingWorkflow, ChildWorkflow], 17 | activities=[compose_greeting], 18 | ) 19 | await worker.run() 20 | 21 | 22 | if __name__ == "__main__": 23 | asyncio.run(main()) 24 | -------------------------------------------------------------------------------- /polling/periodic_sequence/workflows.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from datetime import timedelta 3 | 4 | from temporalio import workflow 5 | from temporalio.common import RetryPolicy 6 | from temporalio.exceptions import ActivityError 7 | 8 | with workflow.unsafe.imports_passed_through(): 9 | from polling.periodic_sequence.activities import compose_greeting 10 | from polling.test_service import ComposeGreetingInput 11 | 12 | 13 | @workflow.defn 14 | class GreetingWorkflow: 15 | @workflow.run 16 | async def run(self, name: str) -> str: 17 | return await workflow.execute_child_workflow( 18 | ChildWorkflow.run, 19 | name, 20 | ) 21 | 22 | 23 | @workflow.defn 24 | class ChildWorkflow: 25 | @workflow.run 26 | async def run(self, name: str) -> str: 27 | for i in range(10): 28 | try: 29 | return await workflow.execute_activity( 30 | compose_greeting, 31 | ComposeGreetingInput("Hello", name), 32 | start_to_close_timeout=timedelta(seconds=4), 33 | retry_policy=RetryPolicy( 34 | maximum_attempts=1, 35 | ), 36 | ) 37 | 38 | except ActivityError: 39 | workflow.logger.error("Activity failed, 
retrying in 1 seconds") 40 | await asyncio.sleep(1) 41 | 42 | workflow.continue_as_new(name) 43 | -------------------------------------------------------------------------------- /polling/test_service.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Counter 3 | 4 | from temporalio import activity 5 | 6 | attempts = Counter[str]() 7 | ERROR_ATTEMPTS = 5 8 | 9 | 10 | @dataclass 11 | class ComposeGreetingInput: 12 | greeting: str 13 | name: str 14 | 15 | 16 | async def get_service_result(input): 17 | workflow_id = activity.info().workflow_id 18 | attempts[workflow_id] += 1 19 | 20 | print(f"Attempt {attempts[workflow_id]} of {ERROR_ATTEMPTS} to invoke service") 21 | if attempts[workflow_id] == ERROR_ATTEMPTS: 22 | return f"{input.greeting}, {input.name}!" 23 | raise Exception("service is down") 24 | -------------------------------------------------------------------------------- /prometheus/README.md: -------------------------------------------------------------------------------- 1 | # Prometheus Sample 2 | 3 | This sample shows how to have SDK Prometheus metrics made available via HTTP. 4 | 5 | To run, first see [README.md](../README.md) for prerequisites. Then, run the following from this directory to start the 6 | worker: 7 | 8 | uv run worker.py 9 | 10 | This will start the worker and the metrics will be visible for this process at http://127.0.0.1:9000/metrics. 11 | 12 | Then, in another terminal, run the following to execute a workflow: 13 | 14 | uv run starter.py 15 | 16 | After executing the workflow, the process will stay open so the metrics if this separate process can be accessed at 17 | http://127.0.0.1:9001/metrics. 
-------------------------------------------------------------------------------- /prometheus/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/prometheus/__init__.py -------------------------------------------------------------------------------- /prometheus/starter.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | 5 | from prometheus.worker import GreetingWorkflow, init_runtime_with_prometheus 6 | 7 | interrupt_event = asyncio.Event() 8 | 9 | 10 | async def main(): 11 | runtime = init_runtime_with_prometheus(9001) 12 | 13 | # Connect client 14 | client = await Client.connect( 15 | "localhost:7233", 16 | runtime=runtime, 17 | ) 18 | 19 | # Run workflow 20 | result = await client.execute_workflow( 21 | GreetingWorkflow.run, 22 | "Temporal", 23 | id="prometheus-workflow-id", 24 | task_queue="prometheus-task-queue", 25 | ) 26 | print(f"Workflow result: {result}") 27 | print( 28 | "Prometheus client metrics available at http://127.0.0.1:9001/metrics, ctrl+c to exit" 29 | ) 30 | await interrupt_event.wait() 31 | 32 | 33 | if __name__ == "__main__": 34 | loop = asyncio.new_event_loop() 35 | try: 36 | loop.run_until_complete(main()) 37 | except KeyboardInterrupt: 38 | interrupt_event.set() 39 | loop.run_until_complete(loop.shutdown_asyncgens()) 40 | -------------------------------------------------------------------------------- /prometheus/worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from datetime import timedelta 3 | 4 | from temporalio import activity, workflow 5 | from temporalio.client import Client 6 | from temporalio.runtime import PrometheusConfig, Runtime, TelemetryConfig 7 | from temporalio.worker import Worker 8 | 9 | 10 | @workflow.defn 11 | class 
GreetingWorkflow: 12 | @workflow.run 13 | async def run(self, name: str) -> str: 14 | return await workflow.execute_activity( 15 | compose_greeting, 16 | name, 17 | start_to_close_timeout=timedelta(seconds=10), 18 | ) 19 | 20 | 21 | @activity.defn 22 | async def compose_greeting(name: str) -> str: 23 | return f"Hello, {name}!" 24 | 25 | 26 | interrupt_event = asyncio.Event() 27 | 28 | 29 | def init_runtime_with_prometheus(port: int) -> Runtime: 30 | # Create runtime for use with Prometheus metrics 31 | return Runtime( 32 | telemetry=TelemetryConfig( 33 | metrics=PrometheusConfig(bind_address=f"127.0.0.1:{port}") 34 | ) 35 | ) 36 | 37 | 38 | async def main(): 39 | runtime = init_runtime_with_prometheus(9000) 40 | 41 | # Connect client 42 | client = await Client.connect( 43 | "localhost:7233", 44 | runtime=runtime, 45 | ) 46 | 47 | # Run a worker for the workflow 48 | async with Worker( 49 | client, 50 | task_queue="prometheus-task-queue", 51 | workflows=[GreetingWorkflow], 52 | activities=[compose_greeting], 53 | ): 54 | # Wait until interrupted 55 | print("Worker started") 56 | print( 57 | "Prometheus metrics available at http://127.0.0.1:9000/metrics, ctrl+c to exit" 58 | ) 59 | await interrupt_event.wait() 60 | print("Shutting down") 61 | 62 | 63 | if __name__ == "__main__": 64 | loop = asyncio.new_event_loop() 65 | try: 66 | loop.run_until_complete(main()) 67 | except KeyboardInterrupt: 68 | interrupt_event.set() 69 | loop.run_until_complete(loop.shutdown_asyncgens()) 70 | -------------------------------------------------------------------------------- /pydantic_converter/README.md: -------------------------------------------------------------------------------- 1 | # Pydantic Converter Sample 2 | 3 | This sample shows how to use the Pydantic data converter. 4 | 5 | For this sample, the optional `pydantic_converter` dependency group must be included. 
To include, run: 6 | 7 | uv sync --group pydantic-converter 8 | 9 | To run, first see [README.md](../README.md) for prerequisites. Then, run the following from this directory to start the 10 | worker: 11 | 12 | uv run worker.py 13 | 14 | This will start the worker. Then, in another terminal, run the following to execute the workflow: 15 | 16 | uv run starter.py 17 | 18 | In the worker terminal, the workflow and its activity will log that it received the Pydantic models. In the starter 19 | terminal, the Pydantic models in the workflow result will be logged. 20 | -------------------------------------------------------------------------------- /pydantic_converter/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/pydantic_converter/__init__.py -------------------------------------------------------------------------------- /pydantic_converter/starter.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | from datetime import datetime 4 | from ipaddress import IPv4Address 5 | 6 | from temporalio.client import Client 7 | from temporalio.contrib.pydantic import pydantic_data_converter 8 | 9 | from pydantic_converter.worker import MyPydanticModel, MyWorkflow 10 | 11 | 12 | async def main(): 13 | logging.basicConfig(level=logging.INFO) 14 | # Connect client using the Pydantic converter 15 | client = await Client.connect( 16 | "localhost:7233", data_converter=pydantic_data_converter 17 | ) 18 | 19 | # Run workflow 20 | result = await client.execute_workflow( 21 | MyWorkflow.run, 22 | [ 23 | MyPydanticModel( 24 | some_ip=IPv4Address("127.0.0.1"), 25 | some_date=datetime(2000, 1, 2, 3, 4, 5), 26 | ), 27 | MyPydanticModel( 28 | some_ip=IPv4Address("127.0.0.2"), 29 | some_date=datetime(2001, 2, 3, 4, 5, 6), 30 | ), 31 | ], 32 | id="pydantic_converter-workflow-id", 
33 | task_queue="pydantic_converter-task-queue", 34 | ) 35 | logging.info("Got models from client: %s" % result) 36 | 37 | 38 | if __name__ == "__main__": 39 | asyncio.run(main()) 40 | -------------------------------------------------------------------------------- /pydantic_converter_v1/README.md: -------------------------------------------------------------------------------- 1 | # Pydantic v1 Converter Sample 2 | 3 | **This sample shows how to use Pydantic v1 with Temporal. This is not recommended: use Pydantic v2 if possible, and use the 4 | main [pydantic_converter](../pydantic_converter/README.md) sample.** 5 | 6 | To install, run: 7 | 8 | uv sync --group pydantic-converter 9 | uv run pip uninstall pydantic pydantic-core 10 | uv run pip install pydantic==1.10 11 | 12 | To run, first see the root [README.md](../README.md) for prerequisites. Then, run the following from this directory to start the 13 | worker: 14 | 15 | uv run worker.py 16 | 17 | This will start the worker. Then, in another terminal, run the following to execute the workflow: 18 | 19 | uv run starter.py 20 | 21 | In the worker terminal, the workflow and its activity will log that it received the Pydantic models. In the starter 22 | terminal, the Pydantic models in the workflow result will be logged. 23 | 24 | ### Notes 25 | 26 | This sample also demonstrates use of `datetime` inside of Pydantic v1 models. Due to a known issue with the Temporal 27 | sandbox, this class is seen by Pydantic v1 as `date` instead of `datetime` upon deserialization. This is due to a 28 | [known Python issue](https://github.com/python/cpython/issues/89010) where, when we proxy the `datetime` class in the 29 | sandbox to prevent non-deterministic calls like `now()`, `issubclass` fails for the proxy type causing Pydantic v1 to think 30 | it's a `date` instead. 
In `worker.py`, we have shown a workaround of disabling restrictions on `datetime` which solves 31 | this issue but no longer protects against workflow developers making non-deterministic calls in that module. -------------------------------------------------------------------------------- /pydantic_converter_v1/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/pydantic_converter_v1/__init__.py -------------------------------------------------------------------------------- /pydantic_converter_v1/converter.py: -------------------------------------------------------------------------------- 1 | import json 2 | from typing import Any, Optional 3 | 4 | from pydantic.json import pydantic_encoder 5 | from temporalio.api.common.v1 import Payload 6 | from temporalio.converter import ( 7 | CompositePayloadConverter, 8 | DataConverter, 9 | DefaultPayloadConverter, 10 | JSONPlainPayloadConverter, 11 | ) 12 | 13 | 14 | class PydanticJSONPayloadConverter(JSONPlainPayloadConverter): 15 | """Pydantic JSON payload converter. 16 | 17 | This extends the :py:class:`JSONPlainPayloadConverter` to override 18 | :py:meth:`to_payload` using the Pydantic encoder. 19 | """ 20 | 21 | def to_payload(self, value: Any) -> Optional[Payload]: 22 | """Convert all values with Pydantic encoder or fail. 23 | 24 | Like the base class, we fail if we cannot convert. This payload 25 | converter is expected to be the last in the chain, so it can fail if 26 | unable to convert. 
27 | """ 28 | # We let JSON conversion errors be thrown to caller 29 | return Payload( 30 | metadata={"encoding": self.encoding.encode()}, 31 | data=json.dumps( 32 | value, separators=(",", ":"), sort_keys=True, default=pydantic_encoder 33 | ).encode(), 34 | ) 35 | 36 | 37 | class PydanticPayloadConverter(CompositePayloadConverter): 38 | """Payload converter that replaces Temporal JSON conversion with Pydantic 39 | JSON conversion. 40 | """ 41 | 42 | def __init__(self) -> None: 43 | super().__init__( 44 | *( 45 | ( 46 | c 47 | if not isinstance(c, JSONPlainPayloadConverter) 48 | else PydanticJSONPayloadConverter() 49 | ) 50 | for c in DefaultPayloadConverter.default_encoding_payload_converters 51 | ) 52 | ) 53 | 54 | 55 | pydantic_data_converter = DataConverter( 56 | payload_converter_class=PydanticPayloadConverter 57 | ) 58 | """Data converter using Pydantic JSON conversion.""" 59 | -------------------------------------------------------------------------------- /pydantic_converter_v1/starter.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | from datetime import datetime 4 | from ipaddress import IPv4Address 5 | 6 | from temporalio.client import Client 7 | 8 | from pydantic_converter_v1.converter import pydantic_data_converter 9 | from pydantic_converter_v1.worker import MyPydanticModel, MyWorkflow 10 | 11 | 12 | async def main(): 13 | logging.basicConfig(level=logging.INFO) 14 | # Connect client using the Pydantic converter 15 | client = await Client.connect( 16 | "localhost:7233", data_converter=pydantic_data_converter 17 | ) 18 | 19 | # Run workflow 20 | result = await client.execute_workflow( 21 | MyWorkflow.run, 22 | [ 23 | MyPydanticModel( 24 | some_ip=IPv4Address("127.0.0.1"), 25 | some_date=datetime(2000, 1, 2, 3, 4, 5), 26 | ), 27 | MyPydanticModel( 28 | some_ip=IPv4Address("127.0.0.2"), 29 | some_date=datetime(2001, 2, 3, 4, 5, 6), 30 | ), 31 | ], 32 | 
id="pydantic_converter-workflow-id", 33 | task_queue="pydantic_converter-task-queue", 34 | ) 35 | logging.info("Got models from client: %s" % result) 36 | 37 | 38 | if __name__ == "__main__": 39 | asyncio.run(main()) 40 | -------------------------------------------------------------------------------- /replay/README.md: -------------------------------------------------------------------------------- 1 | # Replay Sample 2 | 3 | This sample shows you how you can verify changes to workflow code are compatible with existing 4 | workflow histories. 5 | 6 | To run, first see [README.md](../README.md) for prerequisites. Then, run the following from this directory to start the 7 | worker: 8 | 9 | uv run worker.py 10 | 11 | This will start the worker. Then, in another terminal, run the following to execute a workflow: 12 | 13 | uv run starter.py 14 | 15 | Next, run the replayer: 16 | 17 | uv run replayer.py 18 | 19 | Which should produce some output like: 20 | 21 | WorkflowReplayResults(replay_failures={}) 22 | 23 | Great! Replay worked. Of course, the reason for the exercise is to catch if you've changed workflow 24 | code in a manner which is *not* compatible with the existing histories. Try it. Open up `worker.py` 25 | and change the `JustActivity` workflow to sleep just before running the activity. Add 26 | `await asyncio.sleep(0.1)` just before the line with `workflow.execute_activity`. 27 | 28 | Now run the replayer again. The results from the `replay_workflows` call now indeed contains a 29 | failure! 
Something like: 30 | 31 | WorkflowReplayResults(replay_failures={'e6418672-323c-4868-9de4-ece8f34fec53': NondeterminismError('Workflow activation completion failed: Failure { failure: Some(Failure { message: "Nondeterminism(\\"Timer machine does not handle this event: HistoryEvent(id: 8, Some(ActivityTaskScheduled))\\")", source: "", stack_trace: "", encoded_attributes: None, cause: None, failure_info: Some(ApplicationFailureInfo(ApplicationFailureInfo { r#type: "", non_retryable: false, details: None })) }) }')}) 32 | 33 | This is telling you that the workflow is not compatible with the existing history. Phew! Glad we 34 | didn't deploy that one. 35 | -------------------------------------------------------------------------------- /replay/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/replay/__init__.py -------------------------------------------------------------------------------- /replay/replayer.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | from temporalio.worker import Replayer 5 | 6 | from replay.worker import JustActivity, JustTimer, TimerThenActivity 7 | 8 | 9 | async def main(): 10 | # Connect client 11 | client = await Client.connect("localhost:7233") 12 | 13 | # Fetch the histories of the workflows to be replayed 14 | workflows = client.list_workflows('WorkflowId="replayer-workflow-id"') 15 | histories = workflows.map_histories() 16 | replayer = Replayer(workflows=[JustActivity, JustTimer, TimerThenActivity]) 17 | results = await replayer.replay_workflows(histories, raise_on_replay_failure=False) 18 | print(results) 19 | 20 | 21 | if __name__ == "__main__": 22 | asyncio.run(main()) 23 | -------------------------------------------------------------------------------- /replay/starter.py: 
-------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | 5 | from replay.worker import JustActivity, JustTimer, TimerThenActivity 6 | 7 | 8 | async def main(): 9 | # Connect client 10 | client = await Client.connect("localhost:7233") 11 | 12 | # Run a few workflows 13 | # Importantly, normally we would *not* advise re-using the same workflow ID for all of these, 14 | # but we do this to avoid requiring advanced visibility when we want to fetch all the histories 15 | # in the replayer. 16 | result = await client.execute_workflow( 17 | JustActivity.run, 18 | "replayer", 19 | id=f"replayer-workflow-id", 20 | task_queue="replay-sample", 21 | ) 22 | print(f"JustActivity Workflow result: {result}") 23 | 24 | result = await client.execute_workflow( 25 | JustTimer.run, 26 | "replayer", 27 | id=f"replayer-workflow-id", 28 | task_queue="replay-sample", 29 | ) 30 | print(f"JustTimer Workflow result: {result}") 31 | 32 | result = await client.execute_workflow( 33 | TimerThenActivity.run, 34 | "replayer", 35 | id=f"replayer-workflow-id", 36 | task_queue="replay-sample", 37 | ) 38 | print(f"TimerThenActivity Workflow result: {result}") 39 | 40 | 41 | if __name__ == "__main__": 42 | asyncio.run(main()) 43 | -------------------------------------------------------------------------------- /resource_pool/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/resource_pool/__init__.py -------------------------------------------------------------------------------- /resource_pool/pool_client/__init__.py: -------------------------------------------------------------------------------- 1 | from .resource_pool_client import ResourcePoolClient 2 | from .resource_pool_workflow import ResourcePoolWorkflow 3 | 
-------------------------------------------------------------------------------- /resource_pool/shared.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field 2 | 3 | RESOURCE_POOL_WORKFLOW_ID = "resource_pool" 4 | 5 | 6 | @dataclass 7 | class AcquireRequest: 8 | workflow_id: str 9 | 10 | 11 | @dataclass 12 | class AcquireResponse: 13 | release_key: str 14 | resource: str 15 | 16 | 17 | @dataclass 18 | class DetachedResource: 19 | resource: str 20 | release_key: str 21 | 22 | 23 | @dataclass 24 | class AcquiredResource: 25 | resource: str 26 | release_key: str 27 | detached: bool = field(default=False) 28 | 29 | def detach(self) -> DetachedResource: 30 | self.detached = True 31 | return DetachedResource(resource=self.resource, release_key=self.release_key) 32 | -------------------------------------------------------------------------------- /resource_pool/worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | from temporalio.client import Client 5 | from temporalio.worker import Worker 6 | 7 | from resource_pool.pool_client.resource_pool_workflow import ResourcePoolWorkflow 8 | from resource_pool.resource_user_workflow import ResourceUserWorkflow, use_resource 9 | 10 | 11 | async def main() -> None: 12 | logging.basicConfig(level=logging.INFO) 13 | 14 | # Start client 15 | client = await Client.connect("localhost:7233") 16 | 17 | # Run a worker for the workflow 18 | worker = Worker( 19 | client, 20 | task_queue="resource_pool-task-queue", 21 | workflows=[ResourcePoolWorkflow, ResourceUserWorkflow], 22 | activities=[ 23 | use_resource, 24 | ], 25 | ) 26 | 27 | await worker.run() 28 | 29 | 30 | if __name__ == "__main__": 31 | asyncio.run(main()) 32 | -------------------------------------------------------------------------------- /schedules/README.md: 
-------------------------------------------------------------------------------- 1 | # Schedules Samples 2 | 3 | These samples show how to schedule a Workflow Execution and control certain actions. 4 | 5 | To run, first see [README.md](../README.md) for prerequisites. Then, run the following from this directory to run the `schedules/` sample: 6 | 7 | uv run run_worker.py 8 | uv run start_schedule.py 9 | 10 | Replace `start_schedule.py` in the command with any other example filename to run it instead. 11 | 12 | uv run backfill_schedule.py 13 | uv run delete_schedule.py 14 | uv run describe_schedule.py 15 | uv run list_schedule.py 16 | uv run pause_schedule.py 17 | uv run trigger_schedule.py 18 | uv run update_schedule.py 19 | 20 | - create: Creates a new Schedule. Newly created Schedules return a Schedule ID to be used in other Schedule commands. 21 | - backfill: Backfills the Schedule by going through the specified time periods as if they passed right now. 22 | - delete: Deletes a Schedule. Deleting a Schedule does not affect any Workflows started by the Schedule. 23 | - describe: Shows the current Schedule configuration. This command also provides information about past, current, and future Workflow Runs. 24 | - list: Lists Schedules. 25 | - pause: Pause and unpause a Schedule. 26 | - trigger: Triggers an immediate action with a given Schedule. By default, this action is subject to the Overlap Policy of the Schedule. 27 | - update: Updates an existing Schedule.
28 | -------------------------------------------------------------------------------- /schedules/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/schedules/__init__.py -------------------------------------------------------------------------------- /schedules/backfill_schedule.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from datetime import datetime, timedelta 3 | 4 | from temporalio.client import Client, ScheduleBackfill, ScheduleOverlapPolicy 5 | 6 | 7 | async def main(): 8 | client = await Client.connect("localhost:7233") 9 | handle = client.get_schedule_handle( 10 | "workflow-schedule-id", 11 | ) 12 | now = datetime.utcnow() 13 | await handle.backfill( 14 | ScheduleBackfill( 15 | start_at=now - timedelta(minutes=10), 16 | end_at=now - timedelta(minutes=9), 17 | overlap=ScheduleOverlapPolicy.ALLOW_ALL, 18 | ), 19 | ) 20 | 21 | 22 | if __name__ == "__main__": 23 | asyncio.run(main()) 24 | -------------------------------------------------------------------------------- /schedules/delete_schedule.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | 5 | 6 | async def main(): 7 | client = await Client.connect("localhost:7233") 8 | handle = client.get_schedule_handle( 9 | "workflow-schedule-id", 10 | ) 11 | 12 | await handle.delete() 13 | 14 | 15 | if __name__ == "__main__": 16 | asyncio.run(main()) 17 | -------------------------------------------------------------------------------- /schedules/describe_schedule.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | 5 | 6 | async def main(): 7 | client = await Client.connect("localhost:7233") 8 | handle = 
client.get_schedule_handle( 9 | "workflow-schedule-id", 10 | ) 11 | 12 | desc = await handle.describe() 13 | 14 | print(f"Returns the note: {desc.schedule.state.note}") 15 | 16 | 17 | if __name__ == "__main__": 18 | asyncio.run(main()) 19 | -------------------------------------------------------------------------------- /schedules/list_schedule.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | 5 | 6 | async def main() -> None: 7 | client = await Client.connect("localhost:7233") 8 | 9 | async for schedule in await client.list_schedules(): 10 | print(f"List Schedule Info: {schedule.info}.") 11 | 12 | 13 | if __name__ == "__main__": 14 | asyncio.run(main()) 15 | -------------------------------------------------------------------------------- /schedules/pause_schedule.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | 5 | 6 | async def main(): 7 | client = await Client.connect("localhost:7233") 8 | handle = client.get_schedule_handle( 9 | "workflow-schedule-id", 10 | ) 11 | 12 | await handle.pause(note="Pausing the schedule for now") 13 | 14 | 15 | if __name__ == "__main__": 16 | asyncio.run(main()) 17 | -------------------------------------------------------------------------------- /schedules/run_worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | from temporalio.worker import Worker 5 | from your_activities import your_activity 6 | from your_workflows import YourSchedulesWorkflow 7 | 8 | 9 | async def main(): 10 | client = await Client.connect("localhost:7233") 11 | worker = Worker( 12 | client, 13 | task_queue="schedules-task-queue", 14 | workflows=[YourSchedulesWorkflow], 15 | activities=[your_activity], 16 | ) 17 | await worker.run() 18 | 19 | 20 | if 
__name__ == "__main__": 21 | asyncio.run(main()) 22 | -------------------------------------------------------------------------------- /schedules/start_schedule.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from datetime import timedelta 3 | 4 | from temporalio.client import ( 5 | Client, 6 | Schedule, 7 | ScheduleActionStartWorkflow, 8 | ScheduleIntervalSpec, 9 | ScheduleSpec, 10 | ScheduleState, 11 | ) 12 | from your_workflows import YourSchedulesWorkflow 13 | 14 | 15 | async def main(): 16 | client = await Client.connect("localhost:7233") 17 | await client.create_schedule( 18 | "workflow-schedule-id", 19 | Schedule( 20 | action=ScheduleActionStartWorkflow( 21 | YourSchedulesWorkflow.run, 22 | "my schedule arg", 23 | id="schedules-workflow-id", 24 | task_queue="schedules-task-queue", 25 | ), 26 | spec=ScheduleSpec( 27 | intervals=[ScheduleIntervalSpec(every=timedelta(minutes=2))] 28 | ), 29 | state=ScheduleState(note="Here's a note on my Schedule."), 30 | ), 31 | ) 32 | 33 | 34 | if __name__ == "__main__": 35 | asyncio.run(main()) 36 | -------------------------------------------------------------------------------- /schedules/trigger_schedule.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import Client 4 | 5 | 6 | async def main(): 7 | client = await Client.connect("localhost:7233") 8 | handle = client.get_schedule_handle( 9 | "workflow-schedule-id", 10 | ) 11 | 12 | await handle.trigger() 13 | 14 | 15 | if __name__ == "__main__": 16 | asyncio.run(main()) 17 | -------------------------------------------------------------------------------- /schedules/update_schedule.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from temporalio.client import ( 4 | Client, 5 | ScheduleActionStartWorkflow, 6 | ScheduleUpdate, 7 | ScheduleUpdateInput, 8 | ) 9 | 10 | 11 | 
async def main(): 12 | client = await Client.connect("localhost:7233") 13 | handle = client.get_schedule_handle( 14 | "workflow-schedule-id", 15 | ) 16 | 17 | async def update_schedule_simple(input: ScheduleUpdateInput) -> ScheduleUpdate: 18 | schedule_action = input.description.schedule.action 19 | 20 | if isinstance(schedule_action, ScheduleActionStartWorkflow): 21 | schedule_action.args = ["my new schedule arg"] 22 | return ScheduleUpdate(schedule=input.description.schedule) 23 | 24 | await handle.update(update_schedule_simple) 25 | 26 | 27 | if __name__ == "__main__": 28 | asyncio.run(main()) 29 | -------------------------------------------------------------------------------- /schedules/your_activities.py: -------------------------------------------------------------------------------- 1 | from temporalio import activity 2 | from your_dataobject import YourParams 3 | 4 | 5 | @activity.defn 6 | async def your_activity(input: YourParams) -> str: 7 | return f"{input.greeting}, {input.name}!" 
8 | -------------------------------------------------------------------------------- /schedules/your_dataobject.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | 4 | @dataclass 5 | class YourParams: 6 | greeting: str 7 | name: str 8 | -------------------------------------------------------------------------------- /schedules/your_workflows.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | 3 | from temporalio import workflow 4 | 5 | with workflow.unsafe.imports_passed_through(): 6 | from your_activities import your_activity 7 | from your_dataobject import YourParams 8 | 9 | 10 | @workflow.defn 11 | class YourSchedulesWorkflow: 12 | @workflow.run 13 | async def run(self, name: str) -> str: 14 | return await workflow.execute_activity( 15 | your_activity, 16 | YourParams("Hello", name), 17 | start_to_close_timeout=timedelta(seconds=10), 18 | ) 19 | -------------------------------------------------------------------------------- /sentry/README.md: -------------------------------------------------------------------------------- 1 | # Sentry Sample 2 | 3 | This sample shows how to configure [Sentry](https://sentry.io) to intercept and capture errors from the Temporal SDK. 4 | 5 | For this sample, the optional `sentry` dependency group must be included. To include, run: 6 | 7 | uv sync --group sentry 8 | 9 | To run, first see [README.md](../README.md) for prerequisites. Set `SENTRY_DSN` environment variable to the Sentry DSN. 10 | Then, run the following from this directory to start the worker: 11 | 12 | uv run worker.py 13 | 14 | This will start the worker. Then, in another terminal, run the following to execute the workflow: 15 | 16 | uv run starter.py 17 | 18 | The workflow should complete with the hello result. 
If you alter the workflow or the activity to raise an 19 | `ApplicationError` instead, it should appear in Sentry. -------------------------------------------------------------------------------- /sentry/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/sentry/__init__.py -------------------------------------------------------------------------------- /sentry/starter.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | 4 | from temporalio.client import Client 5 | 6 | from sentry.worker import GreetingWorkflow 7 | 8 | 9 | async def main(): 10 | # Connect client 11 | client = await Client.connect("localhost:7233") 12 | 13 | # Run workflow 14 | result = await client.execute_workflow( 15 | GreetingWorkflow.run, 16 | "World", 17 | id="sentry-workflow-id", 18 | task_queue="sentry-task-queue", 19 | ) 20 | print(f"Workflow result: {result}") 21 | 22 | 23 | if __name__ == "__main__": 24 | asyncio.run(main()) 25 | -------------------------------------------------------------------------------- /sentry/worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | import os 4 | from dataclasses import dataclass 5 | from datetime import timedelta 6 | 7 | import sentry_sdk 8 | from temporalio import activity, workflow 9 | from temporalio.client import Client 10 | from temporalio.worker import Worker 11 | 12 | from sentry.interceptor import SentryInterceptor 13 | 14 | 15 | @dataclass 16 | class ComposeGreetingInput: 17 | greeting: str 18 | name: str 19 | 20 | 21 | @activity.defn 22 | async def compose_greeting(input: ComposeGreetingInput) -> str: 23 | activity.logger.info("Running activity with parameter %s" % input) 24 | return f"{input.greeting}, {input.name}!" 
25 | 26 | 27 | @workflow.defn 28 | class GreetingWorkflow: 29 | @workflow.run 30 | async def run(self, name: str) -> str: 31 | workflow.logger.info("Running workflow with parameter %s" % name) 32 | return await workflow.execute_activity( 33 | compose_greeting, 34 | ComposeGreetingInput("Hello", name), 35 | start_to_close_timeout=timedelta(seconds=10), 36 | ) 37 | 38 | 39 | async def main(): 40 | # Uncomment the line below to see logging 41 | # logging.basicConfig(level=logging.INFO) 42 | 43 | # Initialize the Sentry SDK 44 | sentry_sdk.init( 45 | dsn=os.environ.get("SENTRY_DSN"), 46 | ) 47 | 48 | # Start client 49 | client = await Client.connect("localhost:7233") 50 | 51 | # Run a worker for the workflow 52 | worker = Worker( 53 | client, 54 | task_queue="sentry-task-queue", 55 | workflows=[GreetingWorkflow], 56 | activities=[compose_greeting], 57 | interceptors=[SentryInterceptor()], # Use SentryInterceptor for error reporting 58 | ) 59 | 60 | await worker.run() 61 | 62 | 63 | if __name__ == "__main__": 64 | asyncio.run(main()) 65 | -------------------------------------------------------------------------------- /sleep_for_days/README.md: -------------------------------------------------------------------------------- 1 | # Sleep for Days 2 | 3 | This sample demonstrates how to create a Temporal workflow that runs forever, sending an email every 30 days. 4 | 5 | To run, first see the main [README.md](../README.md) for prerequisites. 6 | 7 | Then create two terminals and `cd` to this directory. 8 | 9 | Run the worker in one terminal: 10 | 11 | uv run worker.py 12 | 13 | And execute the workflow in the other terminal: 14 | 15 | uv run starter.py 16 | 17 | This sample will run indefinitely until you send a signal to `complete`. See how to send a signal via Temporal CLI [here](https://docs.temporal.io/cli/workflow#signal).
18 | 19 | -------------------------------------------------------------------------------- /sleep_for_days/__init__.py: -------------------------------------------------------------------------------- 1 | TASK_QUEUE = "sleep-for-days-task-queue" 2 | -------------------------------------------------------------------------------- /sleep_for_days/activities.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | from temporalio import activity 4 | 5 | 6 | @dataclass 7 | class SendEmailInput: 8 | email_msg: str 9 | 10 | 11 | @activity.defn() 12 | async def send_email(input: SendEmailInput) -> str: 13 | """ 14 | A stub Activity for sending an email. 15 | """ 16 | result = f"Email message: {input.email_msg}, sent" 17 | activity.logger.info(result) 18 | return result 19 | -------------------------------------------------------------------------------- /sleep_for_days/starter.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import uuid 3 | from typing import Optional 4 | 5 | from temporalio.client import Client 6 | 7 | from sleep_for_days import TASK_QUEUE 8 | from sleep_for_days.workflows import SleepForDaysWorkflow 9 | 10 | 11 | async def main(client: Optional[Client] = None): 12 | client = client or await Client.connect("localhost:7233") 13 | wf_handle = await client.start_workflow( 14 | SleepForDaysWorkflow.run, 15 | id=f"sleep-for-days-workflow-id-{uuid.uuid4()}", 16 | task_queue=TASK_QUEUE, 17 | ) 18 | # Wait for workflow completion (runs indefinitely until it receives a signal) 19 | print(await wf_handle.result()) 20 | 21 | 22 | if __name__ == "__main__": 23 | asyncio.run(main()) 24 | -------------------------------------------------------------------------------- /sleep_for_days/worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | from 
temporalio.client import Client 5 | from temporalio.worker import Worker 6 | 7 | from sleep_for_days import TASK_QUEUE 8 | from sleep_for_days.activities import send_email 9 | from sleep_for_days.workflows import SleepForDaysWorkflow 10 | 11 | 12 | async def main(): 13 | client = await Client.connect("localhost:7233") 14 | 15 | worker = Worker( 16 | client, 17 | task_queue=TASK_QUEUE, 18 | workflows=[SleepForDaysWorkflow], 19 | activities=[send_email], 20 | ) 21 | 22 | await worker.run() 23 | 24 | 25 | if __name__ == "__main__": 26 | logging.basicConfig(level=logging.INFO) 27 | asyncio.run(main()) 28 | -------------------------------------------------------------------------------- /sleep_for_days/workflows.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from dataclasses import dataclass 3 | from datetime import timedelta 4 | 5 | from temporalio import workflow 6 | 7 | with workflow.unsafe.imports_passed_through(): 8 | from sleep_for_days.activities import SendEmailInput, send_email 9 | 10 | 11 | @workflow.defn() 12 | class SleepForDaysWorkflow: 13 | def __init__(self) -> None: 14 | self.is_complete = False 15 | 16 | @workflow.run 17 | async def run(self) -> str: 18 | while not self.is_complete: 19 | await workflow.execute_activity( 20 | send_email, 21 | SendEmailInput("30 days until the next email"), 22 | start_to_close_timeout=timedelta(seconds=10), 23 | ) 24 | await workflow.wait( 25 | [ 26 | asyncio.create_task(workflow.sleep(timedelta(days=30))), 27 | asyncio.create_task( 28 | workflow.wait_condition(lambda: self.is_complete) 29 | ), 30 | ], 31 | return_when=asyncio.FIRST_COMPLETED, 32 | ) 33 | return "done!" 
34 | 35 | @workflow.signal 36 | def complete(self): 37 | self.is_complete = True 38 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/tests/__init__.py -------------------------------------------------------------------------------- /tests/activity_sticky_queues/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/tests/activity_sticky_queues/__init__.py -------------------------------------------------------------------------------- /tests/activity_sticky_queues/activity_sticky_queues_activity_test.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from unittest import mock 3 | 4 | from worker_specific_task_queues import tasks 5 | 6 | RETURNED_PATH = "valid/path" 7 | tasks._get_delay_secs = mock.MagicMock(return_value=0.0001) 8 | tasks._get_local_path = mock.MagicMock(return_value=Path(RETURNED_PATH)) 9 | 10 | 11 | async def test_download_activity(): 12 | worker_id = "an-id" 13 | workflow_uuid = "uuid" 14 | want = Path(RETURNED_PATH) / worker_id / workflow_uuid 15 | 16 | details = tasks.DownloadObj("tdd.com", worker_id, workflow_uuid) 17 | with mock.patch.object(tasks, "write_file") as mock_write: 18 | response = await tasks.download_file_to_worker_filesystem(details) 19 | assert Path(response) == want 20 | mock_write.assert_called_once() 21 | 22 | 23 | async def test_processing_activity(): 24 | file_contents = b"contents" 25 | want = tasks.process_file_contents(file_contents) 26 | 27 | with mock.patch.object(tasks, "read_file", return_value=file_contents): 28 | response = await 
tasks.work_on_file_in_worker_filesystem(RETURNED_PATH) 29 | assert response == want 30 | 31 | 32 | async def test_clean_up_activity(): 33 | with mock.patch.object(tasks, "delete_file") as mock_delete: 34 | await tasks.clean_up_file_from_worker_filesystem(RETURNED_PATH) 35 | mock_delete.assert_called_once_with(RETURNED_PATH) 36 | -------------------------------------------------------------------------------- /tests/context_propagation/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/tests/context_propagation/__init__.py -------------------------------------------------------------------------------- /tests/context_propagation/workflow_test.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | from temporalio import activity 4 | from temporalio.client import Client 5 | from temporalio.exceptions import ApplicationError 6 | from temporalio.worker import Worker 7 | 8 | from context_propagation.interceptor import ContextPropagationInterceptor 9 | from context_propagation.shared import user_id 10 | from context_propagation.workflows import SayHelloWorkflow 11 | 12 | 13 | async def test_workflow_with_context_propagator(client: Client): 14 | # Mock out the activity to assert the context value 15 | @activity.defn(name="say_hello_activity") 16 | async def say_hello_activity_mock(name: str) -> str: 17 | try: 18 | assert user_id.get() == "test-user" 19 | except Exception as err: 20 | raise ApplicationError("Assertion fail", non_retryable=True) from err 21 | return f"Mock for {name}" 22 | 23 | # Replace interceptors in client 24 | new_config = client.config() 25 | new_config["interceptors"] = [ContextPropagationInterceptor()] 26 | client = Client(**new_config) 27 | task_queue = f"tq-{uuid.uuid4()}" 28 | 29 | async with Worker( 30 | client, 31 | task_queue=task_queue, 32 | 
activities=[say_hello_activity_mock], 33 | workflows=[SayHelloWorkflow], 34 | ): 35 | # Set the user during start/signal, but unset after 36 | token = user_id.set("test-user") 37 | handle = await client.start_workflow( 38 | SayHelloWorkflow.run, 39 | "some-name", 40 | id=f"wf-{uuid.uuid4()}", 41 | task_queue=task_queue, 42 | ) 43 | await handle.signal(SayHelloWorkflow.signal_complete) 44 | user_id.reset(token) 45 | result = await handle.result() 46 | assert result == "Mock for some-name" 47 | -------------------------------------------------------------------------------- /tests/custom_converter/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/tests/custom_converter/__init__.py -------------------------------------------------------------------------------- /tests/custom_converter/workflow_test.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | from temporalio.client import Client 4 | from temporalio.worker import Worker 5 | 6 | from custom_converter.shared import ( 7 | GreetingInput, 8 | GreetingOutput, 9 | greeting_data_converter, 10 | ) 11 | from custom_converter.workflow import GreetingWorkflow 12 | 13 | 14 | async def test_workflow_with_custom_converter(client: Client): 15 | # Replace data converter in client 16 | new_config = client.config() 17 | new_config["data_converter"] = greeting_data_converter 18 | client = Client(**new_config) 19 | task_queue = f"tq-{uuid.uuid4()}" 20 | async with Worker(client, task_queue=task_queue, workflows=[GreetingWorkflow]): 21 | result = await client.execute_workflow( 22 | GreetingWorkflow.run, 23 | GreetingInput("Temporal"), 24 | id=f"wf-{uuid.uuid4()}", 25 | task_queue=task_queue, 26 | ) 27 | assert isinstance(result, GreetingOutput) 28 | assert result.result == "Hello, Temporal" 29 | 
-------------------------------------------------------------------------------- /tests/custom_metric/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/tests/custom_metric/__init__.py -------------------------------------------------------------------------------- /tests/custom_metric/workflow_test.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | from temporalio import activity 4 | from temporalio.client import Client 5 | from temporalio.worker import Worker 6 | 7 | from custom_metric.worker import StartTwoActivitiesWorkflow 8 | 9 | _TASK_QUEUE = "custom-metric-task-queue" 10 | 11 | activity_counter = 0 12 | 13 | 14 | async def test_custom_metric_workflow(client: Client): 15 | @activity.defn(name="print_and_sleep") 16 | async def print_message_mock(): 17 | global activity_counter 18 | activity_counter += 1 19 | 20 | async with Worker( 21 | client, 22 | task_queue=_TASK_QUEUE, 23 | workflows=[StartTwoActivitiesWorkflow], 24 | activities=[print_message_mock], 25 | ): 26 | result = await client.execute_workflow( 27 | StartTwoActivitiesWorkflow.run, 28 | id=str(uuid.uuid4()), 29 | task_queue=_TASK_QUEUE, 30 | ) 31 | assert result is None 32 | assert activity_counter == 2 33 | -------------------------------------------------------------------------------- /tests/hello/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/tests/hello/__init__.py -------------------------------------------------------------------------------- /tests/hello/hello_activity_choice_test.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from temporalio.testing import ActivityEnvironment 3 | 4 | 
from hello.hello_activity_choice import ( 5 | order_apples, 6 | order_bananas, 7 | order_cherries, 8 | order_oranges, 9 | ) 10 | 11 | # A list of tuples where each tuple contains: 12 | # - The activity function 13 | # - The order amount 14 | # - The expected result string 15 | activity_test_data = [ 16 | (order_apples, 5, "Ordered 5 Apples..."), 17 | (order_bananas, 5, "Ordered 5 Bananas..."), 18 | (order_cherries, 5, "Ordered 5 Cherries..."), 19 | (order_oranges, 5, "Ordered 5 Oranges..."), 20 | ] 21 | 22 | 23 | @pytest.mark.asyncio 24 | @pytest.mark.parametrize( 25 | "activity_func, order_amount, expected_result", activity_test_data 26 | ) 27 | async def test_order_fruit(activity_func, order_amount, expected_result): 28 | activity_environment = ActivityEnvironment() 29 | 30 | result = await activity_environment.run(activity_func, order_amount) 31 | 32 | assert result == expected_result 33 | -------------------------------------------------------------------------------- /tests/hello/hello_activity_test.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | from temporalio import activity 4 | from temporalio.client import Client 5 | from temporalio.worker import Worker 6 | 7 | from hello.hello_activity import ( 8 | ComposeGreetingInput, 9 | GreetingWorkflow, 10 | compose_greeting, 11 | ) 12 | 13 | 14 | async def test_execute_workflow(client: Client): 15 | task_queue_name = str(uuid.uuid4()) 16 | 17 | async with Worker( 18 | client, 19 | task_queue=task_queue_name, 20 | workflows=[GreetingWorkflow], 21 | activities=[compose_greeting], 22 | ): 23 | assert "Hello, World!" == await client.execute_workflow( 24 | GreetingWorkflow.run, 25 | "World", 26 | id=str(uuid.uuid4()), 27 | task_queue=task_queue_name, 28 | ) 29 | 30 | 31 | @activity.defn(name="compose_greeting") 32 | async def compose_greeting_mocked(input: ComposeGreetingInput) -> str: 33 | return f"{input.greeting}, {input.name} from mocked activity!" 
34 | 35 | 36 | async def test_mock_activity(client: Client): 37 | task_queue_name = str(uuid.uuid4()) 38 | async with Worker( 39 | client, 40 | task_queue=task_queue_name, 41 | workflows=[GreetingWorkflow], 42 | activities=[compose_greeting_mocked], 43 | ): 44 | assert "Hello, World from mocked activity!" == await client.execute_workflow( 45 | GreetingWorkflow.run, 46 | "World", 47 | id=str(uuid.uuid4()), 48 | task_queue=task_queue_name, 49 | ) 50 | -------------------------------------------------------------------------------- /tests/hello/hello_cancellation_test.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import uuid 3 | 4 | import pytest 5 | from temporalio.client import Client, WorkflowExecutionStatus, WorkflowFailureError 6 | from temporalio.exceptions import CancelledError 7 | from temporalio.worker import Worker 8 | 9 | from hello.hello_cancellation import ( 10 | CancellationWorkflow, 11 | cleanup_activity, 12 | never_complete_activity, 13 | ) 14 | 15 | 16 | async def test_cancel_workflow(client: Client): 17 | task_queue_name = str(uuid.uuid4()) 18 | 19 | async with Worker( 20 | client, 21 | task_queue=task_queue_name, 22 | workflows=[CancellationWorkflow], 23 | activities=[cleanup_activity, never_complete_activity], 24 | ): 25 | handle = await client.start_workflow( 26 | CancellationWorkflow.run, 27 | id=(str(uuid.uuid4())), 28 | task_queue=task_queue_name, 29 | ) 30 | 31 | await asyncio.wait_for( 32 | wait_for_activity_to_start("never_complete_activity", handle), 33 | timeout=5, 34 | ) 35 | 36 | await handle.cancel() 37 | 38 | with pytest.raises(WorkflowFailureError) as err: 39 | await handle.result() 40 | assert isinstance(err.value.cause, CancelledError) 41 | 42 | assert WorkflowExecutionStatus.CANCELED == (await handle.describe()).status 43 | 44 | 45 | async def wait_for_activity_to_start(activity_name, handle): 46 | while not (await has_activity_started(activity_name, handle)): 47 | await 
asyncio.sleep(0.2) 48 | 49 | 50 | async def has_activity_started(activity_name, handle): 51 | pending_activities = (await handle.describe()).raw_description.pending_activities 52 | for pending_activity in pending_activities: 53 | if pending_activity.activity_type.name == activity_name: 54 | return True 55 | 56 | return False 57 | -------------------------------------------------------------------------------- /tests/hello/hello_child_test.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | from temporalio import workflow 4 | from temporalio.client import Client 5 | from temporalio.worker import Worker 6 | 7 | from hello.hello_child_workflow import ( 8 | ComposeGreetingInput, 9 | ComposeGreetingWorkflow, 10 | GreetingWorkflow, 11 | ) 12 | 13 | 14 | async def test_child_workflow(client: Client): 15 | task_queue_name = str(uuid.uuid4()) 16 | async with Worker( 17 | client, 18 | task_queue=task_queue_name, 19 | workflows=[GreetingWorkflow, ComposeGreetingWorkflow], 20 | ): 21 | assert "Hello, World!" == await client.execute_workflow( 22 | GreetingWorkflow.run, 23 | "World", 24 | id=str(uuid.uuid4()), 25 | task_queue=task_queue_name, 26 | ) 27 | 28 | 29 | @workflow.defn(name="ComposeGreetingWorkflow") 30 | class MockedComposeGreetingWorkflow: 31 | @workflow.run 32 | async def run(self, input: ComposeGreetingInput) -> str: 33 | return f"{input.greeting}, {input.name} from mocked child!" 34 | 35 | 36 | async def test_mock_child_workflow(client: Client): 37 | task_queue_name = str(uuid.uuid4()) 38 | async with Worker( 39 | client, 40 | task_queue=task_queue_name, 41 | workflows=[GreetingWorkflow, MockedComposeGreetingWorkflow], 42 | ): 43 | assert "Hello, World from mocked child!" 
== await client.execute_workflow( 44 | GreetingWorkflow.run, 45 | "World", 46 | id=str(uuid.uuid4()), 47 | task_queue=task_queue_name, 48 | ) 49 | -------------------------------------------------------------------------------- /tests/hello/hello_query_test.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | from temporalio.testing import WorkflowEnvironment 4 | from temporalio.worker import Worker 5 | 6 | from hello.hello_query import GreetingWorkflow 7 | 8 | 9 | async def test_query_workflow(): 10 | task_queue_name = str(uuid.uuid4()) 11 | # start manual time skipping 12 | async with await WorkflowEnvironment.start_time_skipping() as env: 13 | async with Worker( 14 | env.client, task_queue=task_queue_name, workflows=[GreetingWorkflow] 15 | ): 16 | handle = await env.client.start_workflow( 17 | GreetingWorkflow.run, 18 | "World", 19 | id=str(uuid.uuid4()), 20 | task_queue=task_queue_name, 21 | ) 22 | 23 | assert "Hello, World!" == await handle.query(GreetingWorkflow.greeting) 24 | # manually skip 3 seconds. This will allow the workflow execution to move forward 25 | await env.sleep(3) 26 | assert "Goodbye, World!" 
== await handle.query(GreetingWorkflow.greeting) 27 | -------------------------------------------------------------------------------- /tests/hello/hello_signal_test.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | from temporalio.client import Client, WorkflowExecutionStatus 4 | from temporalio.worker import Worker 5 | 6 | from hello.hello_signal import GreetingWorkflow 7 | 8 | 9 | async def test_signal_workflow(client: Client): 10 | task_queue_name = str(uuid.uuid4()) 11 | async with Worker(client, task_queue=task_queue_name, workflows=[GreetingWorkflow]): 12 | handle = await client.start_workflow( 13 | GreetingWorkflow.run, id=str(uuid.uuid4()), task_queue=task_queue_name 14 | ) 15 | 16 | await handle.signal(GreetingWorkflow.submit_greeting, "user1") 17 | await handle.signal(GreetingWorkflow.submit_greeting, "user2") 18 | assert WorkflowExecutionStatus.RUNNING == (await handle.describe()).status 19 | 20 | await handle.signal(GreetingWorkflow.exit) 21 | assert ["Hello, user1", "Hello, user2"] == await handle.result() 22 | assert WorkflowExecutionStatus.COMPLETED == (await handle.describe()).status 23 | -------------------------------------------------------------------------------- /tests/hello/hello_update_test.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | import pytest 4 | from temporalio.client import Client, WorkflowExecutionStatus 5 | from temporalio.testing import WorkflowEnvironment 6 | from temporalio.worker import Worker 7 | 8 | from hello.hello_update import GreetingWorkflow 9 | 10 | 11 | async def test_update_workflow(client: Client, env: WorkflowEnvironment): 12 | task_queue_name = str(uuid.uuid4()) 13 | async with Worker(client, task_queue=task_queue_name, workflows=[GreetingWorkflow]): 14 | handle = await client.start_workflow( 15 | GreetingWorkflow.run, id=str(uuid.uuid4()), task_queue=task_queue_name 16 | ) 17 | 18 | assert 
WorkflowExecutionStatus.RUNNING == (await handle.describe()).status 19 | 20 | update_result = await handle.execute_update( 21 | GreetingWorkflow.update_workflow_status 22 | ) 23 | assert "Workflow status updated" == update_result 24 | assert "Hello, World!" == (await handle.result()) 25 | assert WorkflowExecutionStatus.COMPLETED == (await handle.describe()).status 26 | -------------------------------------------------------------------------------- /tests/polling/infrequent/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/tests/polling/infrequent/__init__.py -------------------------------------------------------------------------------- /tests/polling/infrequent/workflow_test.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | import pytest 4 | from temporalio.client import Client 5 | from temporalio.testing import WorkflowEnvironment 6 | from temporalio.worker import Worker 7 | 8 | from polling.infrequent.activities import compose_greeting 9 | from polling.infrequent.workflows import GreetingWorkflow 10 | 11 | 12 | async def test_infrequent_polling_workflow(client: Client, env: WorkflowEnvironment): 13 | if not env.supports_time_skipping: 14 | pytest.skip("Too slow to test with time-skipping disabled") 15 | 16 | # Start a worker that hosts the workflow and activity implementations. 17 | task_queue = f"tq-{uuid.uuid4()}" 18 | async with Worker( 19 | client, 20 | task_queue=task_queue, 21 | workflows=[GreetingWorkflow], 22 | activities=[compose_greeting], 23 | ): 24 | handle = await client.start_workflow( 25 | GreetingWorkflow.run, 26 | "Temporal", 27 | id=f"infrequent-polling-{uuid.uuid4()}", 28 | task_queue=task_queue, 29 | ) 30 | result = await handle.result() 31 | assert result == "Hello, Temporal!" 
32 | -------------------------------------------------------------------------------- /tests/pydantic_converter/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/tests/pydantic_converter/__init__.py -------------------------------------------------------------------------------- /tests/pydantic_converter/workflow_test.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | from datetime import datetime 3 | from ipaddress import IPv4Address 4 | 5 | from temporalio.client import Client 6 | from temporalio.contrib.pydantic import pydantic_data_converter 7 | from temporalio.worker import Worker 8 | 9 | from pydantic_converter.worker import MyPydanticModel, MyWorkflow, my_activity 10 | 11 | 12 | async def test_workflow_with_pydantic_model(client: Client): 13 | # Replace data converter in client 14 | new_config = client.config() 15 | new_config["data_converter"] = pydantic_data_converter 16 | client = Client(**new_config) 17 | task_queue_name = str(uuid.uuid4()) 18 | 19 | orig_models = [ 20 | MyPydanticModel( 21 | some_ip=IPv4Address("127.0.0.1"), some_date=datetime(2000, 1, 2, 3, 4, 5) 22 | ), 23 | MyPydanticModel( 24 | some_ip=IPv4Address("127.0.0.2"), some_date=datetime(2001, 2, 3, 4, 5, 6) 25 | ), 26 | ] 27 | 28 | async with Worker( 29 | client, 30 | task_queue=task_queue_name, 31 | workflows=[MyWorkflow], 32 | activities=[my_activity], 33 | ): 34 | result = await client.execute_workflow( 35 | MyWorkflow.run, 36 | orig_models, 37 | id=str(uuid.uuid4()), 38 | task_queue=task_queue_name, 39 | ) 40 | assert orig_models == result 41 | -------------------------------------------------------------------------------- /tests/pydantic_converter_v1/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/tests/pydantic_converter_v1/__init__.py -------------------------------------------------------------------------------- /tests/pydantic_converter_v1/workflow_test.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | from datetime import datetime 3 | from ipaddress import IPv4Address 4 | 5 | from temporalio.client import Client 6 | from temporalio.worker import Worker 7 | 8 | from pydantic_converter_v1.converter import pydantic_data_converter 9 | from pydantic_converter_v1.worker import ( 10 | MyPydanticModel, 11 | MyWorkflow, 12 | my_activity, 13 | new_sandbox_runner, 14 | ) 15 | 16 | 17 | async def test_workflow_with_pydantic_model(client: Client): 18 | # Replace data converter in client 19 | new_config = client.config() 20 | new_config["data_converter"] = pydantic_data_converter 21 | client = Client(**new_config) 22 | task_queue_name = str(uuid.uuid4()) 23 | 24 | orig_models = [ 25 | MyPydanticModel( 26 | some_ip=IPv4Address("127.0.0.1"), some_date=datetime(2000, 1, 2, 3, 4, 5) 27 | ), 28 | MyPydanticModel( 29 | some_ip=IPv4Address("127.0.0.2"), some_date=datetime(2001, 2, 3, 4, 5, 6) 30 | ), 31 | ] 32 | 33 | async with Worker( 34 | client, 35 | task_queue=task_queue_name, 36 | workflows=[MyWorkflow], 37 | activities=[my_activity], 38 | workflow_runner=new_sandbox_runner(), 39 | ): 40 | result = await client.execute_workflow( 41 | MyWorkflow.run, 42 | orig_models, 43 | id=str(uuid.uuid4()), 44 | task_queue=task_queue_name, 45 | ) 46 | assert orig_models == result 47 | -------------------------------------------------------------------------------- /tests/resource_pool/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/tests/resource_pool/__init__.py 
-------------------------------------------------------------------------------- /tests/sleep_for_days/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/tests/sleep_for_days/__init__.py -------------------------------------------------------------------------------- /tests/trio_async/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/tests/trio_async/__init__.py -------------------------------------------------------------------------------- /tests/trio_async/workflow_test.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | import trio_asyncio 4 | from temporalio.client import Client 5 | from temporalio.worker import Worker 6 | 7 | from trio_async import activities, workflows 8 | 9 | 10 | async def test_workflow_with_trio(client: Client): 11 | @trio_asyncio.aio_as_trio 12 | async def inside_trio(client: Client) -> list[str]: 13 | # Create Trio thread executor 14 | with trio_asyncio.TrioExecutor(max_workers=200) as thread_executor: 15 | task_queue = f"tq-{uuid.uuid4()}" 16 | # Run worker 17 | async with Worker( 18 | client, 19 | task_queue=task_queue, 20 | activities=[ 21 | activities.say_hello_activity_async, 22 | activities.say_hello_activity_sync, 23 | ], 24 | workflows=[workflows.SayHelloWorkflow], 25 | activity_executor=thread_executor, 26 | workflow_task_executor=thread_executor, 27 | ): 28 | # Run workflow and return result 29 | return await client.execute_workflow( 30 | workflows.SayHelloWorkflow.run, 31 | "some-user", 32 | id=f"wf-{uuid.uuid4()}", 33 | task_queue=task_queue, 34 | ) 35 | 36 | result = trio_asyncio.run(inside_trio, client) 37 | assert result == [ 38 | "Hello, some-user! 
(from asyncio)", 39 | "Hello, some-user! (from thread)", 40 | ] 41 | -------------------------------------------------------------------------------- /tests/updatable_timer/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/tests/updatable_timer/__init__.py -------------------------------------------------------------------------------- /tests/updatable_timer/updatable_timer_test.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import logging 3 | import math 4 | import uuid 5 | 6 | from temporalio.client import Client, WorkflowExecutionStatus 7 | from temporalio.testing import WorkflowEnvironment 8 | from temporalio.worker import Worker 9 | 10 | from updatable_timer.workflow import Workflow 11 | 12 | 13 | async def test_updatable_timer_workflow(client: Client): 14 | logging.basicConfig(level=logging.DEBUG) 15 | 16 | task_queue_name = str(uuid.uuid4()) 17 | async with await WorkflowEnvironment.start_time_skipping() as env: 18 | async with Worker(env.client, task_queue=task_queue_name, workflows=[Workflow]): 19 | in_a_day = float( 20 | (datetime.datetime.now() + datetime.timedelta(days=1)).timestamp() 21 | ) 22 | in_an_hour = float( 23 | (datetime.datetime.now() + datetime.timedelta(hours=1)).timestamp() 24 | ) 25 | handle = await env.client.start_workflow( 26 | Workflow.run, in_a_day, id=str(uuid.uuid4()), task_queue=task_queue_name 27 | ) 28 | wake_up_time1 = await handle.query(Workflow.get_wake_up_time) 29 | assert math.isclose(wake_up_time1, in_a_day) 30 | await handle.signal(Workflow.update_wake_up_time, in_an_hour) 31 | wake_up_time2 = await handle.query(Workflow.get_wake_up_time) 32 | assert math.isclose(wake_up_time2, in_an_hour) 33 | await handle.result() 34 | -------------------------------------------------------------------------------- 
/trio_async/README.md: -------------------------------------------------------------------------------- 1 | # Trio Async Sample 2 | 3 | This sample shows how to use Temporal asyncio with [Trio](https://trio.readthedocs.io) using 4 | [Trio asyncio](https://trio-asyncio.readthedocs.io). Specifically it demonstrates using a traditional Temporal client 5 | and worker in a Trio setting, and how Trio-based code can run in both asyncio async activities and threaded sync 6 | activities. 7 | 8 | For this sample, the optional `trio_async` dependency group must be included. To include, run: 9 | 10 | uv sync --group trio_async 11 | 12 | To run, first see [README.md](../README.md) for prerequisites. Then, run the following from this directory to start the 13 | worker: 14 | 15 | uv run worker.py 16 | 17 | This will start the worker. Then, in another terminal, run the following to execute the workflow: 18 | 19 | uv run starter.py 20 | 21 | The starter should complete with: 22 | 23 | INFO:root:Workflow result: ['Hello, Temporal! (from asyncio)', 'Hello, Temporal! 
(from thread)'] -------------------------------------------------------------------------------- /trio_async/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/trio_async/__init__.py -------------------------------------------------------------------------------- /trio_async/activities.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import time 3 | 4 | import trio 5 | import trio_asyncio 6 | from temporalio import activity 7 | 8 | 9 | # An asyncio-based async activity 10 | @activity.defn 11 | async def say_hello_activity_async(name: str) -> str: 12 | # Demonstrate a sleep in both asyncio and Trio, showing that both asyncio 13 | # and Trio primitives can be used 14 | 15 | # First asyncio 16 | activity.logger.info("Sleeping in asyncio") 17 | await asyncio.sleep(0.1) 18 | 19 | # Now Trio. We have to invoke the function separately decorated. 20 | # We cannot use the @trio_as_aio decorator on the activity itself because 21 | # it doesn't use functools wrap or similar so it doesn't respond to things 22 | # like __name__ that @activity.defn needs. 23 | return await say_hello_in_trio_from_asyncio(name) 24 | 25 | 26 | @trio_asyncio.trio_as_aio 27 | async def say_hello_in_trio_from_asyncio(name: str) -> str: 28 | activity.logger.info("Sleeping in Trio (from asyncio)") 29 | await trio.sleep(0.1) 30 | return f"Hello, {name}! (from asyncio)" 31 | 32 | 33 | # A thread-based sync activity 34 | @activity.defn 35 | def say_hello_activity_sync(name: str) -> str: 36 | # Demonstrate a sleep in both threaded and Trio, showing that both 37 | # primitives can be used 38 | 39 | # First, thread-blocking 40 | activity.logger.info("Sleeping normally") 41 | time.sleep(0.1) 42 | 43 | # Now Trio. We have to use Trio's thread sync tools to run trio calls from 44 | # a different thread. 
45 | return trio.from_thread.run(say_hello_in_trio_from_sync, name) 46 | 47 | 48 | async def say_hello_in_trio_from_sync(name: str) -> str: 49 | activity.logger.info("Sleeping in Trio (from thread)") 50 | await trio.sleep(0.1) 51 | return f"Hello, {name}! (from thread)" 52 | -------------------------------------------------------------------------------- /trio_async/starter.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import trio_asyncio 4 | from temporalio.client import Client 5 | 6 | from trio_async import workflows 7 | 8 | 9 | @trio_asyncio.aio_as_trio # Note this decorator which allows asyncio primitives 10 | async def main(): 11 | logging.basicConfig(level=logging.INFO) 12 | 13 | # Connect client 14 | client = await Client.connect("localhost:7233") 15 | 16 | # Execute the workflow 17 | result = await client.execute_workflow( 18 | workflows.SayHelloWorkflow.run, 19 | "Temporal", 20 | id=f"trio-async-workflow-id", 21 | task_queue="trio-async-task-queue", 22 | ) 23 | logging.info(f"Workflow result: {result}") 24 | 25 | 26 | if __name__ == "__main__": 27 | # Note how we're using Trio event loop, not asyncio 28 | trio_asyncio.run(main) 29 | -------------------------------------------------------------------------------- /trio_async/workflows.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | 3 | from temporalio import workflow 4 | 5 | with workflow.unsafe.imports_passed_through(): 6 | from trio_async.activities import say_hello_activity_async, say_hello_activity_sync 7 | 8 | 9 | @workflow.defn 10 | class SayHelloWorkflow: 11 | @workflow.run 12 | async def run(self, name: str) -> list[str]: 13 | # Workflows don't use default asyncio event loop or Trio, they use a 14 | # custom event loop. Therefore Trio primitives should never be used in a 15 | # workflow, only asyncio helpers (which delegate to the custom loop). 
16 | return [ 17 | # That these are two different activities for async or sync means 18 | # nothing to the workflow, we just have both to demonstrate the 19 | # activity side 20 | await workflow.execute_activity( 21 | say_hello_activity_async, 22 | name, 23 | start_to_close_timeout=timedelta(minutes=5), 24 | ), 25 | await workflow.execute_activity( 26 | say_hello_activity_sync, 27 | name, 28 | start_to_close_timeout=timedelta(minutes=5), 29 | ), 30 | ] 31 | -------------------------------------------------------------------------------- /updatable_timer/README.md: -------------------------------------------------------------------------------- 1 | # Updatable Timer Sample 2 | 3 | Demonstrates a helper class which relies on `workflow.wait_condition` to implement a blocking sleep that can be updated at any moment. 4 | 5 | The sample is composed of the three executables: 6 | 7 | * `worker.py` hosts the Workflow Executions. 8 | * `starter.py` starts Workflow Executions. 9 | * `wake_up_timer_updater.py` Signals the Workflow Execution with the new time to wake up. 10 | 11 | First start the Worker: 12 | 13 | ```bash 14 | uv run worker.py 15 | ``` 16 | Check the output of the Worker window. The expected output is: 17 | 18 | ``` 19 | Worker started, ctrl+c to exit 20 | ``` 21 | 22 | Then in a different terminal window start the Workflow Execution: 23 | 24 | ```bash 25 | uv run starter.py 26 | ``` 27 | Check the output of the Worker window. The expected output is: 28 | ``` 29 | Workflow started: run_id=... 30 | ``` 31 | 32 | Then run the updater as many times as you want to change timer to 10 seconds from now: 33 | 34 | ```bash 35 | uv run wake_up_time_updater.py 36 | ``` 37 | 38 | Check the output of the worker window. 
The expected output is: 39 | 40 | ``` 41 | Updated wake up time to 10 seconds from now 42 | ``` -------------------------------------------------------------------------------- /updatable_timer/__init__.py: -------------------------------------------------------------------------------- 1 | TASK_QUEUE = "updatable-timer" 2 | -------------------------------------------------------------------------------- /updatable_timer/starter.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | from datetime import datetime, timedelta 4 | from typing import Optional 5 | 6 | from temporalio import exceptions 7 | from temporalio.client import Client 8 | 9 | from updatable_timer import TASK_QUEUE 10 | from updatable_timer.workflow import Workflow 11 | 12 | 13 | async def main(client: Optional[Client] = None): 14 | logging.basicConfig(level=logging.INFO) 15 | 16 | client = client or await Client.connect("localhost:7233") 17 | try: 18 | handle = await client.start_workflow( 19 | Workflow.run, 20 | (datetime.now() + timedelta(days=1)).timestamp(), 21 | id=f"updatable-timer-workflow", 22 | task_queue=TASK_QUEUE, 23 | ) 24 | logging.info(f"Workflow started: run_id={handle.result_run_id}") 25 | except exceptions.WorkflowAlreadyStartedError as e: 26 | logging.info( 27 | f"Workflow already running: workflow_id={e.workflow_id}, run_id={e.run_id}" 28 | ) 29 | 30 | 31 | if __name__ == "__main__": 32 | asyncio.run(main()) 33 | -------------------------------------------------------------------------------- /updatable_timer/updatable_timer_lib.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from datetime import datetime, timedelta 3 | 4 | from temporalio import workflow 5 | 6 | 7 | class UpdatableTimer: 8 | def __init__(self, wake_up_time: datetime) -> None: 9 | self.wake_up_time = wake_up_time 10 | self.wake_up_time_updated = False 11 | 12 | async def 
sleep(self) -> None: 13 | workflow.logger.info(f"sleep_until: {self.wake_up_time}") 14 | while True: 15 | now = workflow.now() 16 | 17 | sleep_interval = self.wake_up_time - now 18 | if sleep_interval <= timedelta(0): 19 | break 20 | workflow.logger.info(f"Going to sleep for {sleep_interval}") 21 | 22 | try: 23 | self.wake_up_time_updated = False 24 | await workflow.wait_condition( 25 | lambda: self.wake_up_time_updated, 26 | timeout=sleep_interval, 27 | ) 28 | except asyncio.TimeoutError: 29 | # checks condition at the beginning of the loop 30 | continue 31 | workflow.logger.info(f"sleep_until completed") 32 | 33 | def update_wake_up_time(self, wake_up_time: datetime) -> None: 34 | workflow.logger.info(f"update_wake_up_time: {wake_up_time}") 35 | self.wake_up_time = wake_up_time 36 | self.wake_up_time_updated = True 37 | 38 | def get_wake_up_time(self) -> datetime: 39 | return self.wake_up_time 40 | -------------------------------------------------------------------------------- /updatable_timer/wake_up_time_updater.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | from datetime import datetime, timedelta 4 | from typing import Optional 5 | 6 | from temporalio.client import Client 7 | 8 | from updatable_timer.workflow import Workflow 9 | 10 | 11 | async def main(client: Optional[Client] = None): 12 | logging.basicConfig(level=logging.INFO) 13 | 14 | client = client or await Client.connect("localhost:7233") 15 | handle = client.get_workflow_handle(workflow_id="updatable-timer-workflow") 16 | # signal workflow about the wake up time change 17 | await handle.signal( 18 | Workflow.update_wake_up_time, 19 | (datetime.now() + timedelta(seconds=10)).timestamp(), 20 | ) 21 | 22 | logging.info("Updated wake up time to 10 seconds from now") 23 | 24 | 25 | if __name__ == "__main__": 26 | asyncio.run(main()) 27 | -------------------------------------------------------------------------------- 
/updatable_timer/worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | from temporalio.client import Client 5 | from temporalio.worker import Worker 6 | 7 | from updatable_timer import TASK_QUEUE 8 | from updatable_timer.workflow import Workflow 9 | 10 | interrupt_event = asyncio.Event() 11 | 12 | 13 | async def main(): 14 | logging.basicConfig(level=logging.INFO) 15 | 16 | client = await Client.connect("localhost:7233") 17 | async with Worker( 18 | client, 19 | task_queue=TASK_QUEUE, 20 | workflows=[Workflow], 21 | ): 22 | logging.info("Worker started, ctrl+c to exit") 23 | # Wait until interrupted 24 | await interrupt_event.wait() 25 | logging.info("Interrupt received, shutting down...") 26 | 27 | 28 | if __name__ == "__main__": 29 | loop = asyncio.new_event_loop() 30 | asyncio.set_event_loop(loop) 31 | try: 32 | loop.run_until_complete(main()) 33 | except KeyboardInterrupt: 34 | interrupt_event.set() 35 | loop.run_until_complete(loop.shutdown_asyncgens()) 36 | -------------------------------------------------------------------------------- /updatable_timer/workflow.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone 2 | from typing import Optional 3 | 4 | from temporalio import workflow 5 | 6 | from updatable_timer.updatable_timer_lib import UpdatableTimer 7 | 8 | 9 | @workflow.defn 10 | class Workflow: 11 | @workflow.init 12 | def __init__(self, wake_up_time: float) -> None: 13 | self.timer = UpdatableTimer( 14 | datetime.fromtimestamp(wake_up_time, tz=timezone.utc) 15 | ) 16 | 17 | @workflow.run 18 | async def run(self, wake_up_time: float): 19 | await self.timer.sleep() 20 | 21 | @workflow.signal 22 | async def update_wake_up_time(self, wake_up_time: float) -> None: 23 | workflow.logger.info(f"update_wake_up_time: {wake_up_time}") 24 | 25 | self.timer.update_wake_up_time( 26 | 
datetime.fromtimestamp(wake_up_time, tz=timezone.utc) 27 | ) 28 | 29 | @workflow.query 30 | def get_wake_up_time(self) -> float: 31 | workflow.logger.info(f"get_wake_up_time") 32 | return float(self.timer.get_wake_up_time().timestamp()) 33 | -------------------------------------------------------------------------------- /worker_specific_task_queues/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/worker_specific_task_queues/__init__.py -------------------------------------------------------------------------------- /worker_specific_task_queues/demo_fs/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore -------------------------------------------------------------------------------- /worker_specific_task_queues/starter.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from uuid import uuid4 3 | 4 | from temporalio.client import Client 5 | 6 | from worker_specific_task_queues.tasks import FileProcessing 7 | 8 | 9 | async def main(): 10 | # Connect client 11 | client = await Client.connect("localhost:7233") 12 | 13 | # Start 10 concurrent workflows 14 | futures = [] 15 | for idx in range(10): 16 | result = client.execute_workflow( 17 | FileProcessing.run, 18 | id=f"worker_specific_task_queue-workflow-id-{idx}", 19 | task_queue="worker_specific_task_queue-distribution-queue", 20 | ) 21 | await asyncio.sleep(0.1) 22 | futures.append(result) 23 | 24 | checksums = await asyncio.gather(*futures) 25 | print("\n".join([f"Output checksums:"] + checksums)) 26 | 27 | 28 | if __name__ == "__main__": 29 | asyncio.run(main()) 30 | -------------------------------------------------------------------------------- /worker_specific_task_queues/static/all-activitites-on-same-task-queue.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/worker_specific_task_queues/static/all-activitites-on-same-task-queue.png -------------------------------------------------------------------------------- /worker_versioning/README.md: -------------------------------------------------------------------------------- 1 | # Worker Versioning Sample 2 | 3 | This sample shows you how you can use the [Worker Versioning](https://docs.temporal.io/workers#worker-versioning) 4 | feature to deploy incompatible changes to workflow code more easily. 5 | 6 | To run, first see [README.md](../README.md) for prerequisites. Then, run the following from this directory: 7 | 8 | uv run example.py 9 | 10 | This will add some Build IDs to a Task Queue, and will also run Workers with those versions to show how you can 11 | mark add versions, mark them as compatible (or not) with one another, and run Workers at specific versions. You'll 12 | see that only the workers only process Workflow Tasks assigned versions they are compatible with. 
--------------------------------------------------------------------------------
/worker_versioning/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/temporalio/samples-python/5d640657882d1e2a6eb9fa1d76a8946eaf768e09/worker_versioning/__init__.py

--------------------------------------------------------------------------------
/worker_versioning/activities.py:
--------------------------------------------------------------------------------
from temporalio import activity


# Simple greeting activity used by every workflow version.
@activity.defn
async def greet(inp: str) -> str:
    return f"Hi from {inp}"


# Variant activity introduced for the patched (v1.1) workflow branch.
@activity.defn
async def super_greet(inp: str, some_number: int) -> str:
    return f"Hi from {inp} with {some_number}"

--------------------------------------------------------------------------------
/worker_versioning/workflow_v1.py:
--------------------------------------------------------------------------------
from datetime import timedelta

from temporalio import workflow

with workflow.unsafe.imports_passed_through():
    from worker_versioning.activities import greet


@workflow.defn
class MyWorkflow:
    """The 1.0 version of the workflow we'll be making changes to"""

    should_finish: bool = False

    @workflow.run
    async def run(self) -> str:
        workflow.logger.info("Running workflow V1")
        # Parks until the `proceeder` signal is received with "finish".
        await workflow.wait_condition(lambda: self.should_finish)
        return "Concluded workflow on V1"

    @workflow.signal
    async def proceeder(self, inp: str):
        await workflow.execute_activity(
            greet, "V1", start_to_close_timeout=timedelta(seconds=5)
        )
        if inp == "finish":
            self.should_finish = True

--------------------------------------------------------------------------------
/worker_versioning/workflow_v1_1.py:
--------------------------------------------------------------------------------
from datetime import timedelta

from temporalio import workflow

with workflow.unsafe.imports_passed_through():
    from worker_versioning.activities import greet, super_greet


@workflow.defn
class MyWorkflow:
    """
    The 1.1 version of the workflow, which is compatible with the first version.

    The compatible changes we've made are:
    - Altering the log lines
    - Using the `patched` API to properly introduce branching behavior while maintaining
      compatibility
    """

    should_finish: bool = False

    @workflow.run
    async def run(self) -> str:
        workflow.logger.info("Running workflow V1.1")
        await workflow.wait_condition(lambda: self.should_finish)
        return "Concluded workflow on V1.1"

    @workflow.signal
    async def proceeder(self, inp: str):
        # `patched` returns True for new executions and for histories that
        # recorded the patch marker; False when replaying a pre-patch history.
        if workflow.patched("different-activity"):
            await workflow.execute_activity(
                super_greet,
                args=["V1.1", 100],
                start_to_close_timeout=timedelta(seconds=5),
            )
        else:
            # Note it is a valid compatible change to alter the input to an activity. However, because
            # we're using the patched API, this branch would only be taken if the workflow was started on
            # a v1 worker.
            await workflow.execute_activity(
                greet, "V1.1", start_to_close_timeout=timedelta(seconds=5)
            )

        if inp == "finish":
            self.should_finish = True

--------------------------------------------------------------------------------
/worker_versioning/workflow_v2.py:
--------------------------------------------------------------------------------
import asyncio
from datetime import timedelta

from temporalio import workflow

with workflow.unsafe.imports_passed_through():
    from worker_versioning.activities import greet


@workflow.defn
class MyWorkflow:
    """
    The 2.0 version of the workflow, which is fully incompatible with the other workflows, since it
    alters the sequence of commands without using `patched`.
    """

    should_finish: bool = False

    @workflow.run
    async def run(self) -> str:
        workflow.logger.info("Running workflow V2")
        await workflow.wait_condition(lambda: self.should_finish)
        return "Concluded workflow on V2"

    @workflow.signal
    async def proceeder(self, inp: str):
        # Incompatible change: a timer plus a second activity call alter the
        # command sequence relative to v1/v1.1, with no `patched` guard.
        await asyncio.sleep(1)
        await workflow.execute_activity(
            greet, "V2", start_to_close_timeout=timedelta(seconds=5)
        )
        await workflow.execute_activity(
            greet, "V2", start_to_close_timeout=timedelta(seconds=5)
        )

        if inp == "finish":
            self.should_finish = True
--------------------------------------------------------------------------------