diff --git a/.circleci/config.yml b/.circleci/config.yml
deleted file mode 100644
index cd84ef34..00000000
--- a/.circleci/config.yml
+++ /dev/null
@@ -1,73 +0,0 @@
-version: 2.1
-orbs:
- ruby: circleci/ruby@0.1.2
-
-jobs:
- test_gem:
- docker:
- - image: circleci/ruby:2.6.3-stretch-node
- executor: ruby/default
- steps:
- - checkout
- - ruby/bundle-install
- - run:
- name: Run RSpec
- command: bundle exec rspec
-
- test_examples:
- docker:
- - image: circleci/ruby:2.6.3-stretch-node
- - image: circleci/postgres:alpine
- name: postgres
- environment:
- POSTGRES_PASSWORD: temporal
- - image: temporalio/auto-setup:latest
- name: temporal
- environment:
- - DB=postgresql
- - DB_PORT=5432
- - POSTGRES_USER=postgres
- - POSTGRES_PWD=temporal
- - POSTGRES_SEEDS=postgres
- - DYNAMIC_CONFIG_FILE_PATH=config/dynamicconfig/development.yaml
-
- environment:
- - TEMPORAL_HOST=temporal
-
- steps:
- - checkout
-
- - run:
- name: Bundle Install
- command: cd examples && bundle install --path vendor/bundle
-
- - run:
- name: Register Namespace
- command: cd examples && bin/register_namespace ruby-samples
-
- - run:
- name: Wait for Namespace to settle
- command: sleep 15
-
- - run:
- name: Boot up worker
- command: cd examples && bin/worker
- background: true
-
- - run:
- name: Boot up crypt worker
- command: cd examples && bin/worker
- background: true
- environment:
- USE_ENCRYPTION: 1
-
- - run:
- name: Run RSpec
- command: cd examples && bundle exec rspec
-
-workflows:
- version: 2
- test:
- jobs:
- - test_gem
- - test_examples
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
new file mode 100644
index 00000000..022d8b57
--- /dev/null
+++ b/.github/workflows/tests.yml
@@ -0,0 +1,85 @@
+name: Tests
+
+on:
+ push:
+ branches: [ "master" ]
+ pull_request:
+ branches: [ "master" ]
+
+jobs:
+ test_gem:
+ runs-on: ubuntu-latest
+
+ strategy:
+ fail-fast: false
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Set up Ruby
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: 3.0.3
+ bundler-cache: true
+
+ - name: Run tests
+ run: |
+ bundle exec rspec
+
+ test_examples:
+ runs-on: ubuntu-latest
+
+ strategy:
+ fail-fast: false
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Start dependencies
+ run: |
+ docker compose \
+ -f examples/docker-compose.yml \
+ up -d
+
+ - name: Set up Ruby
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: 3.0.3
+
+ - name: Bundle install
+ run: |
+ cd examples && bundle install --path vendor/bundle
+
+ - name: Wait for dependencies to settle
+ run: |
+ sleep 10
+
+ - name: Register namespace
+ run: |
+ cd examples && bin/register_namespace ruby-samples
+
+ - name: Wait for namespace to settle
+ run: |
+ sleep 10
+
+ - name: Boot up worker
+ run: |
+ cd examples && bin/worker &
+
+ - name: Boot up crypt worker
+ env:
+ USE_ENCRYPTION: 1
+ run: |
+ cd examples && bin/worker &
+
+ - name: Boot up worker for v2 error serialization tests
+ env:
+ USE_ERROR_SERIALIZATION_V2: 1
+ run: |
+ cd examples && bin/worker &
+
+ - name: Run RSpec
+ env:
+ USE_ERROR_SERIALIZATION_V2: 1
+ run: |
+ cd examples && bundle exec rspec
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e604aca7..3536f399 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,38 @@
# Changelog
-## 0.0.1
-- First release
+## 0.1.1
+Allows signals to be processed within the first workflow task.
+
+**IMPORTANT:** This change is backward compatible, but workflows started
+on this version cannot run on earlier versions. If you roll back, you will
+see workflow task failures mentioning an unknown SDK flag. This will prevent
+those workflows from making progress until your code is rolled forward
+again. If you'd like to roll this out more gradually, you can:
+1. Set the `no_signals_in_first_task` configuration option to `true`
+2. Deploy your worker
+3. Wait until you are certain you won't need to roll back
+4. Remove the configuration option, which will default it to `false`
+5. Deploy your worker
+
+## 0.1.0
+
+This introduces signal first ordering. See https://github.com/coinbase/temporal-ruby/issues/258 for
+details on why this is necessary for correct handling of signals.
+
+**IMPORTANT:** This feature requires Temporal server 1.20.0 or newer. If you are running an older
+version of the server, you must either upgrade to at least this version, or you can set the
+`.legacy_signals` configuration option to true until you can upgrade.
+
+If you do not have existing workflows with signals running or are standing up a worker service
+for the first time, you can ignore all the below instructions.
+
+If you have any workflows with signals running during a deployment and run more than one worker
+process, you must follow these rollout steps to avoid non-determinism errors:
+1. Set `.legacy_signals` in `Temporal::Configuration` to true
+2. Deploy your worker
+3. Remove the `.legacy_signals` setting or set it to `false`
+4. Deploy your worker
+
+These steps ensure any workflow that executes in signals first mode will continue to be executed
+in this order on replay. If you don't follow these steps, you may see failed workflow tasks, which
+in some cases could result in unrecoverable history corruption.
diff --git a/Gemfile b/Gemfile
index f960e788..a98c51b0 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,5 +1,5 @@
source 'https://rubygems.org'
-gemspec
+gem 'google-protobuf', '~> 3.19'
-gem 'coveralls', require: false
+gemspec
diff --git a/Makefile b/Makefile
index 078e4e0d..6967cb5b 100644
--- a/Makefile
+++ b/Makefile
@@ -1,4 +1,4 @@
-PROTO_ROOT := proto/temporal
+PROTO_ROOT := proto
PROTO_FILES = $(shell find $(PROTO_ROOT) -name "*.proto")
PROTO_DIRS = $(sort $(dir $(PROTO_FILES)))
PROTO_OUT := lib/gen
@@ -6,4 +6,4 @@ PROTO_OUT := lib/gen
proto:
$(foreach PROTO_DIR,$(PROTO_DIRS),bundle exec grpc_tools_ruby_protoc -Iproto --ruby_out=$(PROTO_OUT) --grpc_out=$(PROTO_OUT) $(PROTO_DIR)*.proto;)
-.PHONY: proto
\ No newline at end of file
+.PHONY: proto
diff --git a/README.md b/README.md
index cc8b0441..a64c1159 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# Ruby worker for Temporal
+# Ruby SDK for Temporal
[![Coverage Status](https://coveralls.io/repos/github/coinbase/temporal-ruby/badge.svg?branch=master)](https://coveralls.io/github/coinbase/temporal-ruby?branch=master)
@@ -6,7 +6,7 @@
A pure Ruby library for defining and running Temporal workflows and activities.
-To find more about Temporal please visit .
+To find more about Temporal itself please visit https://temporal.io/.
## Getting Started
@@ -14,7 +14,7 @@ To find more about Temporal please visit .
Clone this repository:
```sh
-> git clone git@github.com:coinbase/temporal-ruby.git
+git clone git@github.com:coinbase/temporal-ruby.git
```
Include this gem to your `Gemfile`:
@@ -26,6 +26,7 @@ gem 'temporal-ruby', github: 'coinbase/temporal-ruby'
Define an activity:
```ruby
+require 'temporal-ruby'
class HelloActivity < Temporal::Activity
def execute(name)
puts "Hello #{name}!"
@@ -49,37 +50,55 @@ class HelloWorldWorkflow < Temporal::Workflow
end
```
-Configure your Temporal connection:
+Configure your Temporal connection and register the namespace with the Temporal service:
```ruby
+require 'temporal-ruby'
Temporal.configure do |config|
config.host = 'localhost'
config.port = 7233
config.namespace = 'ruby-samples'
config.task_queue = 'hello-world'
+ config.credentials = :this_channel_is_insecure
end
-```
-
-Register namespace with the Temporal service:
-```ruby
-Temporal.register_namespace('ruby-samples', 'A safe space for playing with Temporal Ruby')
+begin
+ Temporal.register_namespace('ruby-samples', 'A safe space for playing with Temporal Ruby')
+rescue Temporal::NamespaceAlreadyExistsFailure
+ nil # service was already registered
+end
```
-Configure and start your worker process:
+
+Configure and start your worker process in a terminal shell:
```ruby
+require 'path/to/configuration'
require 'temporal/worker'
worker = Temporal::Worker.new
worker.register_workflow(HelloWorldWorkflow)
worker.register_activity(HelloActivity)
-worker.start
+worker.start # runs forever
```
-And finally start your workflow:
+You can add several options when initializing worker (here defaults are provided as values):
```ruby
+Temporal::Worker.new(
+ activity_thread_pool_size: 20, # how many threads poll for activities
+ workflow_thread_pool_size: 10, # how many threads poll for workflows
+ binary_checksum: nil, # identifies the version of workflow worker code
+ activity_poll_retry_seconds: 0, # how many seconds to wait after unsuccessful poll for activities
+ workflow_poll_retry_seconds: 0, # how many seconds to wait after unsuccessful poll for workflows
+ activity_max_tasks_per_second: 0 # rate-limit for starting activity tasks (new activities + retries) on the task queue
+)
+```
+
+And finally start your workflow in another terminal shell:
+
+```ruby
+require 'path/to/configuration'
require 'path/to/hello_world_workflow'
Temporal.start_workflow(HelloWorldWorkflow)
@@ -97,14 +116,110 @@ available, make sure to check them out.
## Installing dependencies
Temporal service handles all the persistence, fault tolerance and coordination of your workflows and
-activities. To set it up locally, download and boot the Docker Compose file from the official repo:
+activities. To set it up locally, download and boot the Docker Compose file from the official repo.
+The Docker Compose file forwards all ports to your localhost so you can interact with
+the containers easily from your shells.
+
+Run:
```sh
-> curl -O https://raw.githubusercontent.com/temporalio/docker-compose/main/docker-compose.yml
+curl -O https://raw.githubusercontent.com/temporalio/docker-compose/main/docker-compose.yml
-> docker-compose up
+docker-compose up
```
+## Using Credentials
+
+### SSL
+
+In many production deployments you will end up connecting to your Temporal Services via SSL. In this
+case you must read the public certificate of the CA that issued your Temporal server's SSL certificate and create
+an instance of [gRPC Channel Credentials](https://grpc.io/docs/guides/auth/#with-server-authentication-ssltls-1).
+
+Configure your Temporal connection:
+
+```ruby
+Temporal.configure do |config|
+ config.host = 'localhost'
+ config.port = 7233
+ config.namespace = 'ruby-samples'
+ config.task_queue = 'hello-world'
+ config.credentials = GRPC::Core::ChannelCredentials.new(root_cert, client_key, client_chain)
+end
+```
+
+### OAuth2 Token
+
+Use gRPC Call Credentials to add OAuth2 token to gRPC calls:
+
+```ruby
+Temporal.configure do |config|
+ config.host = 'localhost'
+ config.port = 7233
+ config.namespace = 'ruby-samples'
+ config.task_queue = 'hello-world'
+ config.credentials = GRPC::Core::CallCredentials.new(updater_proc)
+end
+```
+`updater_proc` should be a method that returns `proc`. See an example of `updater_proc` in [googleauth](https://www.rubydoc.info/gems/googleauth/0.1.0/Signet/OAuth2/Client) library.
+
+### Combining Credentials
+
+To configure both SSL and OAuth2 token credentials use `compose` method:
+
+```ruby
+Temporal.configure do |config|
+ config.host = 'localhost'
+ config.port = 7233
+ config.namespace = 'ruby-samples'
+ config.task_queue = 'hello-world'
+ config.credentials = GRPC::Core::ChannelCredentials.new(root_cert, client_key, client_chain).compose(
+ GRPC::Core::CallCredentials.new(token.updater_proc)
+ )
+end
+```
+
+## Configuration
+
+This gem is optimised for the smoothest out-of-the-box experience, which is achieved using a global
+configuration:
+
+```ruby
+Temporal.configure do |config|
+ config.host = '127.0.0.1' # sets global host
+ ...
+end
+
+Temporal::Worker.new # uses global host
+Temporal.start_workflow(...) # uses global host
+```
+
+This will work just fine for simpler use-cases, however at some point you might need to setup
+multiple clients and workers within the same instance of your app (e.g. you have different Temporal
+hosts, need to use different codecs/converters for different parts of your app, etc). Should this be
+the case we recommend using explicit local configurations for each client/worker:
+
+```ruby
+config_1 = Temporal::Configuration.new
+config_1.host = 'temporal-01'
+
+config_2 = Temporal::Configuration.new
+config_2.host = 'temporal-02'
+
+worker_1 = Temporal::Worker.new(config_1)
+worker_2 = Temporal::Worker.new(config_2)
+
+client_1 = Temporal::Client.new(config_1)
+client_1.start_workflow(...)
+
+client_2 = Temporal::Client.new(config_2)
+client_2.start_workflow(...)
+```
+
+*NOTE: Almost all the methods on the `Temporal` module are delegated to the default client that's
+initialized using global configuration. The same methods can be used directly on your own client
+instances.*
+
## Workflows
A workflow is defined using pure Ruby code, however it should contain only a high-level
@@ -169,7 +284,7 @@ Besides calling activities workflows can:
- Use timers
- Receive signals
- Execute other (child) workflows
-- Respond to queries [not yet implemented]
+- Respond to queries
## Activities
@@ -327,6 +442,36 @@ arguments are identical to the `Temporal.start_workflow` API.
set it to allow as many invocations as you need. You can also set it to `nil`, which will use a
default value of 10 years.*
+## Middleware
+Middleware sits between the execution of your workflows/activities and the Temporal SDK, allowing you to insert custom code before or after the execution.
+
+### Activity Middleware Stack
+Middleware added to the activity middleware stack will be executed around each activity method. This is useful when you want to perform a certain task before and/or after each activity execution, such as logging, error handling, or measuring execution time.
+
+### Workflow Middleware Stack
+There are actually two types of workflow middleware in Temporal Ruby SDK:
+
+*Workflow Middleware*: This middleware is executed around each entire workflow. This is similar to activity middleware, but for workflows.
+
+*Workflow Task Middleware*: This middleware is executed around each workflow task, of which there will be many for each workflow.
+
+### Example
+To add a middleware, you need to define a class that responds to the call method. Within the call method, you should call yield to allow the next middleware in the stack (or the workflow/activity method itself if there are no more middlewares) to execute. Here's an example:
+
+```
+class MyMiddleware
+ def call(metadata)
+ puts "Before execution"
+    result = yield
+    puts "After execution"
+    result
+ end
+end
+```
+
+You can add this middleware to the stack like so `worker.add_activity_middleware(MyMiddleware)`
+
+Please note that the order of middleware in the stack matters. The middleware that is added last will be the first one to execute. In the example above, MyMiddleware will execute before any other middleware in the stack.
## Breaking Changes
@@ -430,7 +575,8 @@ Temporal::Testing.local! do
end
```
-Make sure to check out [example integration specs](examples/spec/integration) for more details.
+Make sure to check out [example integration specs](examples/spec/integration) for more details. Instructions
+for running these integration specs can be found in [examples/README.md](examples/README.md).
## TODO
diff --git a/examples/.env b/examples/.env
new file mode 100644
index 00000000..d15cca49
--- /dev/null
+++ b/examples/.env
@@ -0,0 +1 @@
+COMPOSE_PROJECT_NAME=temporal-ruby-examples
diff --git a/examples/Gemfile b/examples/Gemfile
index 9c543b77..c6fc0199 100644
--- a/examples/Gemfile
+++ b/examples/Gemfile
@@ -2,7 +2,8 @@ source 'https://rubygems.org'
gem 'temporal-ruby', path: '../'
-gem 'dry-types', '>= 1.2.0'
-gem 'dry-struct', '~> 1.1.1'
+gem 'dry-types', '>= 1.7.2'
+gem 'dry-struct', '~> 1.6.0'
+gem 'google-protobuf', '~> 3.19'
gem 'rspec', group: :test
diff --git a/examples/README.md b/examples/README.md
index 72697ed6..d7fd468e 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -7,19 +7,45 @@ To try these out you need to have a running Temporal service ([setup instruction
Install all the gem dependencies by running:
```sh
-> bundle install
+bundle install
```
Modify the `init.rb` file to point to your Temporal cluster.
-Start a worker process:
+Start the three worker processes. Each of these uses a different task queue because there are differences
+in how their payloads are serialized. You typically want to do this by running each line in a separate
+terminal or via tmux or similar.
```sh
-> bin/worker
+bin/worker
+USE_ENCRYPTION=1 bin/worker
+USE_ERROR_SERIALIZATION_V2=1 bin/worker
```
Use this command to trigger one of the example workflows from the `workflows` directory:
```sh
-> bin/trigger NAME_OF_THE_WORKFLOW [argument_1, argument_2, ...]
+bin/trigger NAME_OF_THE_WORKFLOW [argument_1, argument_2, ...]
```
+## Testing
+
+To run tests, make sure the temporal server is running:
+```shell
+docker-compose up
+```
+
+Run the register_namespace script to ensure the ruby-samples namespace and necessary
+search attributes have been created:
+```shell
+bin/register_namespace
+```
+
+Follow the instructions above to start the three worker processes.
+
+To execute the tests, run:
+```shell
+bundle exec rspec
+```
+To add a new test that uses a new workflow or new activity, make sure to register those new
+workflows and activities by modifying the `bin/worker` file and adding them there. After any
+changes to that file, restart the worker process to pick up the new registrations.
diff --git a/examples/activities/delegator_activity.rb b/examples/activities/delegator_activity.rb
new file mode 100644
index 00000000..1e095d19
--- /dev/null
+++ b/examples/activities/delegator_activity.rb
@@ -0,0 +1,33 @@
+# This sample illustrates using a dynamic Activity to delegate to another set of non-activity
+# classes. This is an advanced use case, used, for example, for integrating with an existing framework
+# that doesn't know about temporal.
+# See Temporal::Worker#register_dynamic_activity for more info.
+
+# An example of another non-Activity class hierarchy.
+class MyExecutor
+ def do_it(_args)
+ raise NotImplementedError
+ end
+end
+
+class Plus < MyExecutor
+ def do_it(args)
+ args[:a] + args[:b]
+ end
+end
+
+class Times < MyExecutor
+ def do_it(args)
+ args[:a] * args[:b]
+ end
+end
+
+# Calls into our other class hierarchy.
+class DelegatorActivity < Temporal::Activity
+ def execute(input)
+ executor = Object.const_get(activity.name).new
+ raise ArgumentError, "Unknown activity: #{executor.class}" unless executor.is_a?(MyExecutor)
+
+ executor.do_it(input)
+ end
+end
diff --git a/examples/activities/failing_with_structured_error_activity.rb b/examples/activities/failing_with_structured_error_activity.rb
new file mode 100644
index 00000000..0d7543ba
--- /dev/null
+++ b/examples/activities/failing_with_structured_error_activity.rb
@@ -0,0 +1,21 @@
+require 'temporal/json'
+
+# Illustrates raising an error with a non-standard initializer that
+# is handleable by the Workflow.
+class FailingWithStructuredErrorActivity < Temporal::Activity
+ retry_policy(max_attempts: 1)
+
+ class MyError < Temporal::ActivityException
+ attr_reader :foo, :bar
+
+ def initialize(foo, bar)
+ @foo = foo
+ @bar = bar
+ end
+ end
+
+ def execute(foo, bar)
+ # Pass activity args into the error for better testing
+ raise MyError.new(foo, bar)
+ end
+end
diff --git a/examples/activities/long_running_activity.rb b/examples/activities/long_running_activity.rb
index 09673776..26cdd576 100644
--- a/examples/activities/long_running_activity.rb
+++ b/examples/activities/long_running_activity.rb
@@ -3,9 +3,14 @@ class Canceled < Temporal::ActivityException; end
def execute(cycles, interval)
cycles.times do
- response = activity.heartbeat
+ # To detect if the activity has been canceled, you can check activity.cancel_requested or
+ # simply heartbeat in which case an ActivityCanceled error will be raised. Cancellation
+ # is only detected through heartbeating, but the setting of this bit can be delayed by
+ # heartbeat throttling which sends the heartbeat on a background thread.
+ activity.logger.info("activity.cancel_requested: #{activity.cancel_requested}")
- if response.cancel_requested
+ activity.heartbeat
+ if activity.cancel_requested
raise Canceled, 'cancel activity request received'
end
diff --git a/examples/activities/terminate_workflow_activity.rb b/examples/activities/terminate_workflow_activity.rb
new file mode 100644
index 00000000..2f1486b8
--- /dev/null
+++ b/examples/activities/terminate_workflow_activity.rb
@@ -0,0 +1,5 @@
+class TerminateWorkflowActivity < Temporal::Activity
+ def execute(namespace, workflow_id, run_id)
+ Temporal.terminate_workflow(workflow_id, namespace: namespace, run_id: run_id)
+ end
+end
diff --git a/examples/bin/query b/examples/bin/query
new file mode 100755
index 00000000..c0e7f719
--- /dev/null
+++ b/examples/bin/query
@@ -0,0 +1,14 @@
+#!/usr/bin/env ruby
+require_relative '../init'
+
+Dir[File.expand_path('../workflows/*.rb', __dir__)].each { |f| require f }
+
+workflow_class_name, workflow_id, run_id, query, args = ARGV
+workflow_class = Object.const_get(workflow_class_name)
+
+if ![workflow_class, workflow_id, run_id, query].all?
+ fail 'Wrong arguments, use `bin/query WORKFLOW WORKFLOW_ID RUN_ID QUERY [ARGS]`'
+end
+
+result = Temporal.query_workflow(workflow_class, query, workflow_id, run_id, args)
+puts result.inspect
diff --git a/examples/bin/register_namespace b/examples/bin/register_namespace
index 9d008c51..e241e95d 100755
--- a/examples/bin/register_namespace
+++ b/examples/bin/register_namespace
@@ -1,11 +1,9 @@
#!/usr/bin/env ruby
require_relative '../init'
-namespace = ARGV[0]
+namespace = ARGV[0] || 'ruby-samples'
description = ARGV[1]
-fail 'Missing namespace name, please run register_namespace ' unless namespace
-
begin
Temporal.register_namespace(namespace, description)
Temporal.logger.info 'Namespace created', { namespace: namespace }
@@ -13,4 +11,32 @@ rescue Temporal::NamespaceAlreadyExistsFailure
Temporal.logger.info 'Namespace already exists', { namespace: namespace }
end
+loop do
+ begin
+ Temporal.list_custom_search_attributes(namespace: namespace)
+ Temporal.logger.info("Namespace is ready", { namespace: namespace })
+ break
+ rescue GRPC::NotFound
+ Temporal.logger.info("Namespace not yet found, waiting and retrying", { namespace: namespace })
+ sleep 1
+ next
+ end
+end
+
+# Register a variety of search attributes for ease of integration testing
+attributes_to_add = {
+ 'CustomStringField' => :text,
+ 'CustomDoubleField' => :double,
+ 'CustomBoolField' => :bool,
+ 'CustomIntField' => :int,
+ 'CustomDatetimeField' => :datetime
+}
+attributes_to_add.each do |name, type|
+ begin
+    Temporal.add_custom_search_attributes({ name => type })
+ Temporal.logger.info("Registered search attributes #{name} = #{type}", { namespace: namespace })
+ rescue Temporal::SearchAttributeAlreadyExistsFailure
+ Temporal.logger.info("Default search attribute #{name} already exist for namespace", { namespace: namespace })
+ end
+end
diff --git a/examples/bin/trigger b/examples/bin/trigger
index 725bd35e..34661068 100755
--- a/examples/bin/trigger
+++ b/examples/bin/trigger
@@ -2,6 +2,7 @@
require_relative '../init'
Dir[File.expand_path('../workflows/*.rb', __dir__)].each { |f| require f }
+Dir[File.expand_path('../middleware/*.rb', __dir__)].each { |f| require f }
workflow_class_name, *args = ARGV
workflow_class = Object.const_get(workflow_class_name)
@@ -10,5 +11,9 @@ workflow_id = SecureRandom.uuid
# Convert integer strings to integers
input = args.map { |arg| Integer(arg) rescue arg }
+Temporal.configure do |config|
+ config.add_header_propagator(SamplePropagator)
+end
+
run_id = Temporal.start_workflow(workflow_class, *input, options: { workflow_id: workflow_id })
Temporal.logger.info "Started workflow", { workflow_id: workflow_id, run_id: run_id }
diff --git a/examples/bin/update_replay_test_histories b/examples/bin/update_replay_test_histories
new file mode 100755
index 00000000..bc0f807a
--- /dev/null
+++ b/examples/bin/update_replay_test_histories
@@ -0,0 +1,51 @@
+#!/usr/bin/env ruby
+
+# This script regenerates the workflow history files used in the example replay tests
+# under examples/spec/replay/histories. It starts the necessary workflow, sends some
+# signals, awaits workflow completion, then collects the history into JSON and protobuf
+# binary file formats.
+#
+# To use this, start your Temporal server and bin/worker first. This script can then
+# be run without any arguments. It will overwrite existing history files in the tree.
+#
+# NOTE: By default, collected history files contain the host names of the machines
+# where the worker and this script are run because the default identity is pid@hostname.
+# If you'd like, you can override this by setting an identity in the configuration in
+# init.rb.
+
+require_relative "../init"
+require_relative "../workflows/signal_with_start_workflow"
+
+workflow_id = SecureRandom.uuid
+run_id = Temporal.start_workflow(
+ SignalWithStartWorkflow,
+ "hit",
+ options: {
+ workflow_id: workflow_id,
+ timeouts: {
+ execution: 30
+ },
+ signal_name: "miss",
+ signal_input: 1
+ }
+)
+Temporal.logger.info("Started workflow", {workflow_id: workflow_id, run_id: run_id})
+sleep(1)
+Temporal.signal_workflow(SignalWithStartWorkflow, "miss", workflow_id, run_id, 2)
+sleep(1)
+Temporal.signal_workflow(SignalWithStartWorkflow, "hit", workflow_id, run_id, 3)
+Temporal.await_workflow_result(SignalWithStartWorkflow, workflow_id: workflow_id, run_id: run_id)
+
+# Save in JSON, exactly like would be downloaded from Temporal UI
+history_json = Temporal.get_workflow_history_json(workflow_id: workflow_id, run_id: run_id)
+filename = File.expand_path("../spec/replay/histories/signal_with_start.json", File.dirname(__FILE__))
+File.open(filename, "w") do |f|
+ f.write(history_json)
+end
+
+# Save in protobuf binary format
+history_binary = Temporal.get_workflow_history_protobuf(workflow_id: workflow_id, run_id: run_id)
+filename = File.expand_path("../spec/replay/histories/signal_with_start.protobin", File.dirname(__FILE__))
+File.open(filename, "wb") do |f|
+ f.write(history_binary)
+end
diff --git a/examples/bin/worker b/examples/bin/worker
index 65828dfa..be6f2b97 100755
--- a/examples/bin/worker
+++ b/examples/bin/worker
@@ -1,6 +1,6 @@
#!/usr/bin/env ruby
require_relative '../init'
-require_relative '../lib/cryptconverter'
+require_relative '../lib/crypt_payload_codec'
require 'temporal/worker'
@@ -11,41 +11,76 @@ Dir[File.expand_path('../middleware/*.rb', __dir__)].each { |f| require f }
if !ENV['USE_ENCRYPTION'].nil?
Temporal.configure do |config|
config.task_queue = 'crypt'
- config.converter = Temporal::CryptConverter.new(
- payload_converter: Temporal::Configuration::DEFAULT_CONVERTER
+ config.payload_codec = Temporal::Connection::Converter::Codec::Chain.new(
+ payload_codecs: [
+ Temporal::CryptPayloadCodec.new
+ ]
)
end
end
-worker = Temporal::Worker.new
+if !ENV['USE_ERROR_SERIALIZATION_V2'].nil?
+ Temporal.configure do |config|
+ config.task_queue = 'error_serialization_v2'
+ config.use_error_serialization_v2 = true
+ end
+end
+
+Temporal.configure do |config|
+ config.add_header_propagator(SamplePropagator)
+end
+
+worker = Temporal::Worker.new(binary_checksum: `git show HEAD -s --format=%H`.strip)
worker.register_workflow(AsyncActivityWorkflow)
worker.register_workflow(AsyncHelloWorldWorkflow)
worker.register_workflow(BranchingWorkflow)
+worker.register_workflow(CallsDelegatorWorkflow)
worker.register_workflow(CallFailingActivityWorkflow)
worker.register_workflow(CancellingTimerWorkflow)
worker.register_workflow(CheckWorkflow)
+worker.register_workflow(ChildWorkflowTimeoutWorkflow)
+worker.register_workflow(ChildWorkflowTerminatedWorkflow)
+worker.register_workflow(ContinueAsNewWorkflow)
worker.register_workflow(FailingActivitiesWorkflow)
worker.register_workflow(FailingWorkflow)
+worker.register_workflow(HandlingStructuredErrorWorkflow)
worker.register_workflow(HelloWorldWorkflow)
+worker.register_workflow(InvalidContinueAsNewWorkflow)
worker.register_workflow(LocalHelloWorldWorkflow)
worker.register_workflow(LongWorkflow)
worker.register_workflow(LoopWorkflow)
+worker.register_workflow(MetadataWorkflow)
+worker.register_workflow(ParentCloseWorkflow)
+worker.register_workflow(ParentIdReuseWorkflow)
worker.register_workflow(ParentWorkflow)
worker.register_workflow(ProcessFileWorkflow)
+worker.register_workflow(QueryWorkflow)
worker.register_workflow(QuickTimeoutWorkflow)
worker.register_workflow(RandomlyFailingWorkflow)
worker.register_workflow(ReleaseWorkflow)
worker.register_workflow(ResultWorkflow)
+worker.register_workflow(ScheduleChildWorkflow)
+worker.register_workflow(SendSignalToExternalWorkflow)
worker.register_workflow(SerialHelloWorldWorkflow)
worker.register_workflow(SideEffectWorkflow)
+worker.register_workflow(SignalWithStartWorkflow)
+worker.register_workflow(SignalWorkflow)
worker.register_workflow(SimpleTimerWorkflow)
+worker.register_workflow(SlowChildWorkflow)
+worker.register_workflow(StartChildWorkflowWorkflow)
worker.register_workflow(TimeoutWorkflow)
worker.register_workflow(TripBookingWorkflow)
+worker.register_workflow(UpsertSearchAttributesWorkflow)
+worker.register_workflow(WaitForWorkflow)
+worker.register_workflow(WaitForExternalSignalWorkflow)
+worker.register_workflow(WaitForNamedSignalWorkflow)
+worker.register_dynamic_workflow(DelegatorWorkflow)
worker.register_activity(AsyncActivity)
worker.register_activity(EchoActivity)
worker.register_activity(FailingActivity)
+worker.register_activity(FailingWithStructuredErrorActivity)
worker.register_activity(GenerateFileActivity)
worker.register_activity(GuessActivity)
worker.register_activity(HelloWorldActivity)
@@ -53,6 +88,7 @@ worker.register_activity(LongRunningActivity)
worker.register_activity(ProcessFileActivity)
worker.register_activity(RandomlyFailingActivity)
worker.register_activity(RandomNumberActivity)
+worker.register_activity(TerminateWorkflowActivity)
worker.register_activity(SleepActivity)
worker.register_activity(UploadFileActivity)
worker.register_activity(Trip::BookFlightActivity)
@@ -62,8 +98,11 @@ worker.register_activity(Trip::CancelFlightActivity)
worker.register_activity(Trip::CancelHotelActivity)
worker.register_activity(Trip::MakePaymentActivity)
worker.register_activity(Trip::RentCarActivity)
+worker.register_dynamic_activity(DelegatorActivity)
worker.add_workflow_task_middleware(LoggingMiddleware, 'EXAMPLE')
worker.add_activity_middleware(LoggingMiddleware, 'EXAMPLE')
+worker.add_activity_middleware(SamplePropagator)
+worker.add_workflow_middleware(SamplePropagator)
worker.start
diff --git a/examples/docker-compose.yml b/examples/docker-compose.yml
index 0f98b630..03d87744 100644
--- a/examples/docker-compose.yml
+++ b/examples/docker-compose.yml
@@ -2,12 +2,14 @@ version: '3.5'
services:
temporal:
- image: temporalio/auto-setup:latest
+ image: temporalio/auto-setup:1.22.0
ports:
- "7233:7233"
environment:
- "CASSANDRA_SEEDS=cassandra"
- - "DYNAMIC_CONFIG_FILE_PATH=config/dynamicconfig/development.yaml"
+ - "DYNAMIC_CONFIG_FILE_PATH=/etc/temporal/config/dynamicconfig/temporal-ruby.yaml"
+ volumes:
+ - ./dynamic-config.yml:/etc/temporal/config/dynamicconfig/temporal-ruby.yaml
depends_on:
- cassandra
diff --git a/examples/dynamic-config.yml b/examples/dynamic-config.yml
new file mode 100644
index 00000000..4481cb02
--- /dev/null
+++ b/examples/dynamic-config.yml
@@ -0,0 +1,2 @@
+system.forceSearchAttributesCacheRefreshOnRead:
+ - value: true
\ No newline at end of file
diff --git a/examples/init.rb b/examples/init.rb
index ab4e1b3a..053c0e14 100644
--- a/examples/init.rb
+++ b/examples/init.rb
@@ -8,10 +8,13 @@
metrics_logger = Logger.new(STDOUT, progname: 'metrics')
+DEFAULT_NAMESPACE = 'ruby-samples'.freeze
+DEFAULT_TASK_QUEUE = 'general'.freeze
+
Temporal.configure do |config|
config.host = ENV.fetch('TEMPORAL_HOST', 'localhost')
config.port = ENV.fetch('TEMPORAL_PORT', 7233).to_i
- config.namespace = ENV.fetch('TEMPORAL_NAMESPACE', 'ruby-samples')
- config.task_queue = ENV.fetch('TEMPORAL_TASK_QUEUE', 'general')
+ config.namespace = ENV.fetch('TEMPORAL_NAMESPACE', DEFAULT_NAMESPACE)
+ config.task_queue = ENV.fetch('TEMPORAL_TASK_QUEUE', DEFAULT_TASK_QUEUE)
config.metrics_adapter = Temporal::MetricsAdapters::Log.new(metrics_logger)
end
diff --git a/examples/lib/cryptconverter.rb b/examples/lib/crypt_payload_codec.rb
similarity index 67%
rename from examples/lib/cryptconverter.rb
rename to examples/lib/crypt_payload_codec.rb
index b3c7b77a..72e6769d 100644
--- a/examples/lib/cryptconverter.rb
+++ b/examples/lib/crypt_payload_codec.rb
@@ -1,7 +1,8 @@
require 'openssl'
+require 'temporal/connection/converter/codec/base'
module Temporal
- class CryptConverter < Temporal::Connection::Converter::Base
+ class CryptPayloadCodec < Temporal::Connection::Converter::Codec::Base
CIPHER = 'aes-256-gcm'.freeze
GCM_NONCE_SIZE = 12
GCM_TAG_SIZE = 16
@@ -10,26 +11,23 @@ class CryptConverter < Temporal::Connection::Converter::Base
METADATA_ENCODING_KEY = 'encoding'.freeze
METADATA_ENCODING = 'binary/encrypted'.freeze
- def to_payloads(data)
+ def encode(payload)
+ return nil if payload.nil?
+
key_id = get_key_id
key = get_key(key_id)
- payloads = super(data)
-
- Temporal::Api::Common::V1::Payloads.new(
- payloads: payloads.payloads.map { |payload| encrypt_payload(payload, key_id, key) }
- )
+ encrypt_payload(payload, key_id, key)
end
+
+ def decode(payload)
+ return nil if payload.nil?
- def from_payloads(payloads)
- return nil if payloads.nil?
-
- payloads.payloads.map do |payload|
- if payload.metadata[METADATA_ENCODING_KEY] == METADATA_ENCODING
- payload = decrypt_payload(payload)
- end
- from_payload(payload)
+ if payload.metadata[METADATA_ENCODING_KEY] == METADATA_ENCODING
+ payload = decrypt_payload(payload)
end
+
+ payload
end
private
@@ -55,12 +53,12 @@ def encrypt(data, key)
end
def encrypt_payload(payload, key_id, key)
- Temporal::Api::Common::V1::Payload.new(
+ Temporalio::Api::Common::V1::Payload.new(
metadata: {
METADATA_ENCODING_KEY => METADATA_ENCODING,
METADATA_KEY_ID_KEY => key_id,
},
- data: encrypt(Temporal::Api::Common::V1::Payload.encode(payload), key)
+ data: encrypt(Temporalio::Api::Common::V1::Payload.encode(payload), key)
)
end
@@ -85,7 +83,7 @@ def decrypt_payload(payload)
key = get_key(key_id)
serialized_payload = decrypt(payload.data, key)
- Temporal::Api::Common::V1::Payload.decode(serialized_payload)
+ Temporalio::Api::Common::V1::Payload.decode(serialized_payload)
end
end
end
diff --git a/examples/middleware/sample_propagator.rb b/examples/middleware/sample_propagator.rb
new file mode 100644
index 00000000..59bb59f0
--- /dev/null
+++ b/examples/middleware/sample_propagator.rb
@@ -0,0 +1,10 @@
+class SamplePropagator
+ def inject!(headers)
+ headers['test-header'] = 'test'
+ end
+
+ def call(metadata)
+ Temporal.logger.info("Got headers!", headers: metadata.headers.to_h)
+ yield
+ end
+end
\ No newline at end of file
diff --git a/examples/spec/helpers.rb b/examples/spec/helpers.rb
index 4d6d9a20..4d4c65a4 100644
--- a/examples/spec/helpers.rb
+++ b/examples/spec/helpers.rb
@@ -2,14 +2,10 @@
module Helpers
def run_workflow(workflow, *input, **args)
- workflow_id = SecureRandom.uuid
- run_id = Temporal.start_workflow(
- workflow,
- *input,
- **args.merge(options: { workflow_id: workflow_id })
- )
+ args[:options] = { workflow_id: SecureRandom.uuid }.merge(args[:options] || {})
+ run_id = Temporal.start_workflow(workflow, *input, **args)
- return workflow_id, run_id
+ [args[:options][:workflow_id], run_id]
end
def wait_for_workflow_completion(workflow_id, run_id)
@@ -24,13 +20,20 @@ def wait_for_workflow_completion(workflow_id, run_id)
def fetch_history(workflow_id, run_id, options = {})
connection = Temporal.send(:default_client).send(:connection)
+ options = {
+ namespace: integration_spec_namespace,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ }.merge(options)
- result = connection.get_workflow_execution_history(
- {
- namespace: Temporal.configuration.namespace,
- workflow_id: workflow_id,
- run_id: run_id,
- }.merge(options)
- )
+ connection.get_workflow_execution_history(**options)
+ end
+
+ def integration_spec_namespace
+ ENV.fetch('TEMPORAL_NAMESPACE', DEFAULT_NAMESPACE)
+ end
+
+ def integration_spec_task_queue
+ ENV.fetch('TEMPORAL_TASK_QUEUE', DEFAULT_TASK_QUEUE)
end
end
diff --git a/examples/spec/integration/activity_cancellation_spec.rb b/examples/spec/integration/activity_cancellation_spec.rb
new file mode 100644
index 00000000..ca39d639
--- /dev/null
+++ b/examples/spec/integration/activity_cancellation_spec.rb
@@ -0,0 +1,36 @@
+require 'workflows/long_workflow'
+
+describe 'Activity cancellation', :integration do
+ it 'cancels a running activity' do
+ workflow_id, run_id = run_workflow(LongWorkflow)
+
+ # Signal workflow after starting, allowing it to schedule the first activity
+ sleep 0.5
+ Temporal.signal_workflow(LongWorkflow, :CANCEL, workflow_id, run_id)
+
+ result = Temporal.await_workflow_result(
+ LongWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+
+ expect(result).to be_a(LongRunningActivity::Canceled)
+ expect(result.message).to eq('cancel activity request received')
+ end
+
+ it 'cancels a non-started activity' do
+ # Workflow is started with a signal which will cancel an activity before it has started
+ workflow_id, run_id = run_workflow(LongWorkflow, options: {
+ signal_name: :CANCEL
+ })
+
+ result = Temporal.await_workflow_result(
+ LongWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+
+ expect(result).to be_a(Temporal::ActivityCanceled)
+ expect(result.message).to eq('ACTIVITY_ID_NOT_STARTED')
+ end
+end
diff --git a/examples/spec/integration/await_workflow_result_spec.rb b/examples/spec/integration/await_workflow_result_spec.rb
index 3b4f1e77..d4d4977d 100644
--- a/examples/spec/integration/await_workflow_result_spec.rb
+++ b/examples/spec/integration/await_workflow_result_spec.rb
@@ -10,7 +10,7 @@
run_id = Temporal.start_workflow(
ResultWorkflow,
expected_result,
- { options: { workflow_id: workflow_id } },
+ options: { workflow_id: workflow_id },
)
actual_result = Temporal.await_workflow_result(
ResultWorkflow,
@@ -26,7 +26,7 @@
first_run_id = Temporal.start_workflow(
ResultWorkflow,
expected_first_result,
- { options: { workflow_id: workflow_id } },
+ options: { workflow_id: workflow_id },
)
actual_first_result = Temporal.await_workflow_result(
ResultWorkflow,
@@ -38,7 +38,7 @@
Temporal.start_workflow(
ResultWorkflow,
expected_second_result,
- { options: { workflow_id: workflow_id } },
+ options: { workflow_id: workflow_id },
)
actual_second_result = Temporal.await_workflow_result(
ResultWorkflow,
@@ -59,7 +59,7 @@
workflow_id = SecureRandom.uuid
run_id = Temporal.start_workflow(
FailingWorkflow,
- { options: { workflow_id: workflow_id } },
+ options: { workflow_id: workflow_id },
)
expect do
@@ -78,7 +78,7 @@
workflow_id = SecureRandom.uuid
run_id = Temporal.start_workflow(
QuickTimeoutWorkflow,
- { options: { workflow_id: workflow_id } },
+ options: { workflow_id: workflow_id },
)
expect do
@@ -95,7 +95,9 @@
run_id = Temporal.start_workflow(
LoopWorkflow,
2, # it continues as new if this arg is > 1
- { options: { workflow_id: workflow_id } },
+ options: {
+ workflow_id: workflow_id,
+ },
)
expect do
diff --git a/examples/spec/integration/call_failing_activity_workflow_spec.rb b/examples/spec/integration/call_failing_activity_workflow_spec.rb
index eef65424..090dd312 100644
--- a/examples/spec/integration/call_failing_activity_workflow_spec.rb
+++ b/examples/spec/integration/call_failing_activity_workflow_spec.rb
@@ -1,15 +1,10 @@
require 'workflows/call_failing_activity_workflow'
describe CallFailingActivityWorkflow, :integration do
-
- class TestDeserializer
- include Temporal::Concerns::Payloads
- end
-
it 'correctly re-raises an activity-thrown exception in the workflow' do
workflow_id = SecureRandom.uuid
expected_message = "a failure message"
- Temporal.start_workflow(described_class, expected_message, { options: { workflow_id: workflow_id } })
+ Temporal.start_workflow(described_class, expected_message, options: { workflow_id: workflow_id })
expect do
Temporal.await_workflow_result(described_class, workflow_id: workflow_id)
end.to raise_error(FailingActivity::MyError, "a failure message")
diff --git a/examples/spec/integration/child_workflow_terminated_workflow_spec.rb b/examples/spec/integration/child_workflow_terminated_workflow_spec.rb
new file mode 100644
index 00000000..f8ad62a2
--- /dev/null
+++ b/examples/spec/integration/child_workflow_terminated_workflow_spec.rb
@@ -0,0 +1,22 @@
+require 'workflows/child_workflow_terminated_workflow.rb'
+
+describe ChildWorkflowTerminatedWorkflow do
+ subject { described_class }
+
+ it 'successfully can catch if a child workflow times out' do
+ workflow_id = SecureRandom.uuid
+
+ Temporal.start_workflow(
+ subject,
+ options: { workflow_id: workflow_id }
+ )
+
+ result = Temporal.await_workflow_result(
+ subject,
+ workflow_id: workflow_id
+ )
+
+ expect(result[:child_workflow_terminated]).to eq(true)
+ expect(result[:error]).to be_a(Temporal::ChildWorkflowTerminatedError)
+ end
+end
diff --git a/examples/spec/integration/child_workflow_timeout_workflow_spec.rb b/examples/spec/integration/child_workflow_timeout_workflow_spec.rb
new file mode 100644
index 00000000..43736b7b
--- /dev/null
+++ b/examples/spec/integration/child_workflow_timeout_workflow_spec.rb
@@ -0,0 +1,22 @@
+require 'workflows/child_workflow_timeout_workflow.rb'
+
+describe ChildWorkflowTimeoutWorkflow do
+ subject { described_class }
+
+ it 'successfully can catch if a child workflow times out' do
+ workflow_id = SecureRandom.uuid
+
+ Temporal.start_workflow(
+ subject,
+ options: { workflow_id: workflow_id }
+ )
+
+ result = Temporal.await_workflow_result(
+ subject,
+ workflow_id: workflow_id
+ )
+ puts result
+ expect(result[:child_workflow_failed]).to eq(true)
+ expect(result[:error]).to be_a(Temporal::ChildWorkflowTimeoutError)
+ end
+end
diff --git a/examples/spec/integration/continue_as_new_spec.rb b/examples/spec/integration/continue_as_new_spec.rb
new file mode 100644
index 00000000..75e9f4e0
--- /dev/null
+++ b/examples/spec/integration/continue_as_new_spec.rb
@@ -0,0 +1,93 @@
+require 'workflows/continue_as_new_workflow'
+require 'workflows/loop_workflow'
+
+describe LoopWorkflow do
+ it 'workflow continues as new into a new run' do
+ workflow_id = SecureRandom.uuid
+ memo = {
+ 'my-memo' => 'foo',
+ }
+ headers = {
+ 'my-header' => 'bar',
+ 'test-header' => 'test',
+ }
+ run_id = Temporal.start_workflow(
+ LoopWorkflow,
+ 2, # it continues as new if this arg is > 1
+ options: {
+ workflow_id: workflow_id,
+ memo: memo,
+ headers: headers,
+ },
+ )
+
+ # First run will throw because it continued as new
+ next_run_id = nil
+ expect do
+ Temporal.await_workflow_result(
+ LoopWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+ end.to raise_error(Temporal::WorkflowRunContinuedAsNew) do |error|
+ next_run_id = error.new_run_id
+ end
+
+ expect(next_run_id).to_not eq(nil)
+
+ # Second run will not throw because it returns rather than continues as new.
+ final_result = Temporal.await_workflow_result(
+ LoopWorkflow,
+ workflow_id: workflow_id,
+ run_id: next_run_id,
+ )
+
+ expect(final_result[:count]).to eq(1)
+
+ # memo and headers should be copied to the next run automatically
+ expect(final_result[:memo]).to eq(memo)
+ expect(final_result[:headers]).to eq(headers)
+ end
+
+ it 'uses history bytes size to continue as new' do
+ workflow_id = SecureRandom.uuid
+ # 7 activity invocations produce about 10,000 bytes of history. This should
+ # result in one continue as new with 7 activities in the first and 3 in the
+ # second run.
+ run_id = Temporal.start_workflow(
+ ContinueAsNewWorkflow,
+ 10, # hello count
+ 10_000, # max bytes limit
+ options: {
+ workflow_id: workflow_id,
+ timeouts: {
+ execution: 60,
+ run: 20
+ }
+ },
+ )
+
+ # First run will throw because it continued as new
+ next_run_id = nil
+ expect do
+ Temporal.await_workflow_result(
+ ContinueAsNewWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+ end.to raise_error(Temporal::WorkflowRunContinuedAsNew) do |error|
+ next_run_id = error.new_run_id
+ end
+
+ expect(next_run_id).to_not eq(nil)
+
+ # Second run will not throw because it returns rather than continues as new.
+ final_result = Temporal.await_workflow_result(
+ ContinueAsNewWorkflow,
+ workflow_id: workflow_id,
+ run_id: next_run_id,
+ )
+
+ expect(final_result[:runs]).to eq(2)
+ end
+end
diff --git a/examples/spec/integration/converter_spec.rb b/examples/spec/integration/converter_spec.rb
index ce1ea66a..576ff55f 100644
--- a/examples/spec/integration/converter_spec.rb
+++ b/examples/spec/integration/converter_spec.rb
@@ -1,29 +1,38 @@
require 'workflows/hello_world_workflow'
-require 'lib/cryptconverter'
+require 'lib/crypt_payload_codec'
+require 'grpc/errors'
describe 'Converter', :integration do
- around(:each) do |example|
- task_queue = Temporal.configuration.task_queue
+ let(:codec) do
+ Temporal::Connection::Converter::Codec::Chain.new(
+ payload_codecs: [
+ Temporal::CryptPayloadCodec.new
+ ]
+ )
+ end
+ around(:each) do |example|
Temporal.configure do |config|
config.task_queue = 'crypt'
- config.converter = Temporal::CryptConverter.new(
- payload_converter: Temporal::Configuration::DEFAULT_CONVERTER
- )
+ config.payload_codec = codec
end
example.run
ensure
Temporal.configure do |config|
- config.task_queue = task_queue
- config.converter = Temporal::Configuration::DEFAULT_CONVERTER
+ config.task_queue = integration_spec_task_queue
+ config.payload_codec = Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
end
end
it 'can encrypt payloads' do
workflow_id, run_id = run_workflow(HelloWorldWorkflow, 'Tom')
- wait_for_workflow_completion(workflow_id, run_id)
+ begin
+ wait_for_workflow_completion(workflow_id, run_id)
+ rescue GRPC::DeadlineExceeded
+ raise "Encrypted-payload workflow didn't run. Make sure you run USE_ENCRYPTION=1 ./bin/worker and try again."
+ end
result = fetch_history(workflow_id, run_id)
@@ -60,8 +69,6 @@
completion_event = events[:EVENT_TYPE_WORKFLOW_EXECUTION_COMPLETED].first
result = completion_event.workflow_execution_completed_event_attributes.result
- converter = Temporal.configuration.converter
-
- expect(converter.from_payloads(result)&.first).to eq('Hello World, Tom')
+ expect(codec.decodes(result).payloads.first.data).to eq('"Hello World, Tom"')
end
end
diff --git a/examples/spec/integration/create_schedule_spec.rb b/examples/spec/integration/create_schedule_spec.rb
new file mode 100644
index 00000000..a7ae3a40
--- /dev/null
+++ b/examples/spec/integration/create_schedule_spec.rb
@@ -0,0 +1,87 @@
+require "temporal/errors"
+require "temporal/schedule/backfill"
+require "temporal/schedule/calendar"
+require "temporal/schedule/interval"
+require "temporal/schedule/schedule"
+require "temporal/schedule/schedule_spec"
+require "temporal/schedule/schedule_policies"
+require "temporal/schedule/schedule_state"
+require "temporal/schedule/start_workflow_action"
+
+describe "Temporal.create_schedule", :integration do
+ let(:example_schedule) do
+ workflow_id = SecureRandom.uuid
+ Temporal::Schedule::Schedule.new(
+ spec: Temporal::Schedule::ScheduleSpec.new(
+ calendars: [Temporal::Schedule::Calendar.new(day_of_week: "*", hour: "18", minute: "30")],
+ intervals: [Temporal::Schedule::Interval.new(every: 6000, offset: 300)],
+ cron_expressions: ["@hourly"],
+ jitter: 30,
+ # Set an end time so that the test schedule doesn't run forever
+ end_time: Time.now + 600
+ ),
+ action: Temporal::Schedule::StartWorkflowAction.new(
+ "HelloWorldWorkflow",
+ "Test",
+ options: {
+ workflow_id: workflow_id,
+ task_queue: integration_spec_task_queue
+ }
+ ),
+ policies: Temporal::Schedule::SchedulePolicies.new(
+ overlap_policy: :buffer_one
+ ),
+ state: Temporal::Schedule::ScheduleState.new(
+ notes: "Created by integration test"
+ )
+ )
+ end
+
+ it "can create schedules" do
+ namespace = integration_spec_namespace
+
+ schedule_id = SecureRandom.uuid
+
+ create_response = Temporal.create_schedule(
+ namespace,
+ schedule_id,
+ example_schedule,
+ memo: {"schedule_memo" => "schedule memo value"},
+ trigger_immediately: true,
+ backfill: Temporal::Schedule::Backfill.new(start_time: (Date.today - 90).to_time, end_time: Time.now)
+ )
+ expect(create_response).to(be_an_instance_of(Temporalio::Api::WorkflowService::V1::CreateScheduleResponse))
+
+ describe_response = Temporal.describe_schedule(namespace, schedule_id)
+
+ expect(describe_response.memo).to(eq({"schedule_memo" => "schedule memo value"}))
+ expect(describe_response.schedule.spec.jitter.seconds).to(eq(30))
+ expect(describe_response.schedule.policies.overlap_policy).to(eq(:SCHEDULE_OVERLAP_POLICY_BUFFER_ONE))
+ expect(describe_response.schedule.action.start_workflow.workflow_type.name).to(eq("HelloWorldWorkflow"))
+ expect(describe_response.schedule.state.notes).to(eq("Created by integration test"))
+ end
+
+ it "can create schedules with a minimal set of fields" do
+ namespace = integration_spec_namespace
+ schedule_id = SecureRandom.uuid
+
+ schedule = Temporal::Schedule::Schedule.new(
+ spec: Temporal::Schedule::ScheduleSpec.new(
+ cron_expressions: ["@hourly"],
+ # Set an end time so that the test schedule doesn't run forever
+ end_time: Time.now + 600
+ ),
+ action: Temporal::Schedule::StartWorkflowAction.new(
+ "HelloWorldWorkflow",
+ "Test",
+ options: {task_queue: integration_spec_task_queue}
+ )
+ )
+
+ Temporal.create_schedule(namespace, schedule_id, schedule)
+
+ describe_response = Temporal.describe_schedule(namespace, schedule_id)
+ expect(describe_response.schedule.action.start_workflow.workflow_type.name).to(eq("HelloWorldWorkflow"))
+ expect(describe_response.schedule.policies.overlap_policy).to(eq(:SCHEDULE_OVERLAP_POLICY_SKIP))
+ end
+end
diff --git a/examples/spec/integration/delete_schedule_spec.rb b/examples/spec/integration/delete_schedule_spec.rb
new file mode 100644
index 00000000..c621710d
--- /dev/null
+++ b/examples/spec/integration/delete_schedule_spec.rb
@@ -0,0 +1,50 @@
+require "temporal/errors"
+require "temporal/schedule/schedule"
+require "temporal/schedule/schedule_spec"
+require "temporal/schedule/start_workflow_action"
+
+describe "Temporal.delete_schedule", :integration do
+ let(:example_schedule) do
+ Temporal::Schedule::Schedule.new(
+ spec: Temporal::Schedule::ScheduleSpec.new(
+ cron_expressions: ["@hourly"],
+ # Set an end time so that the test schedule doesn't run forever
+ end_time: Time.now + 600
+ ),
+ action: Temporal::Schedule::StartWorkflowAction.new(
+ "HelloWorldWorkflow",
+ "Test",
+ options: {
+ task_queue: integration_spec_task_queue
+ }
+ )
+ )
+ end
+
+ it "can delete schedules" do
+ namespace = integration_spec_namespace
+
+ schedule_id = SecureRandom.uuid
+
+ Temporal.create_schedule(namespace, schedule_id, example_schedule)
+ describe_response = Temporal.describe_schedule(namespace, schedule_id)
+ expect(describe_response.schedule.action.start_workflow.workflow_type.name).to(eq("HelloWorldWorkflow"))
+
+ Temporal.delete_schedule(namespace, schedule_id)
+
+    # Now that the schedule is deleted it should raise a not found error
+ expect do
+ Temporal.describe_schedule(namespace, schedule_id)
+ end
+ .to(raise_error(Temporal::NotFoundFailure))
+ end
+
+ it "raises a NotFoundFailure if a schedule doesn't exist" do
+ namespace = integration_spec_namespace
+
+ expect do
+ Temporal.delete_schedule(namespace, "some-invalid-schedule-id")
+ end
+ .to(raise_error(Temporal::NotFoundFailure))
+ end
+end
diff --git a/examples/spec/integration/describe_namespace_spec.rb b/examples/spec/integration/describe_namespace_spec.rb
new file mode 100644
index 00000000..fe2416db
--- /dev/null
+++ b/examples/spec/integration/describe_namespace_spec.rb
@@ -0,0 +1,19 @@
+require 'temporal/errors'
+
+describe 'Temporal.describe_namespace', :integration do
+ it 'returns a value' do
+ namespace = integration_spec_namespace
+ rescued = false
+ begin
+ Temporal.register_namespace(namespace)
+ rescue Temporal::NamespaceAlreadyExistsFailure
+ rescued = true
+ end
+ expect(rescued).to eq(true)
+ result = Temporal.describe_namespace(namespace)
+ expect(result).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::DescribeNamespaceResponse)
+ expect(result.namespace_info.name).to eq(namespace)
+ expect(result.namespace_info.state).to eq(:NAMESPACE_STATE_REGISTERED)
+ expect(result.namespace_info.description).to_not eq(nil)
+ end
+end
diff --git a/examples/spec/integration/dynamic_activity_spec.rb b/examples/spec/integration/dynamic_activity_spec.rb
new file mode 100644
index 00000000..1abec847
--- /dev/null
+++ b/examples/spec/integration/dynamic_activity_spec.rb
@@ -0,0 +1,19 @@
+require 'workflows/calls_delegator_workflow'
+
+describe 'Dynamic activities' do
+ let(:workflow_id) { SecureRandom.uuid }
+
+ it 'can delegate to other classes' do
+ run_id = Temporal.start_workflow(CallsDelegatorWorkflow, options: {
+ workflow_id: workflow_id
+ })
+
+ result = Temporal.await_workflow_result(
+ CallsDelegatorWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id
+ )
+ expect(result[:sum]).to eq(8)
+ expect(result[:product]).to eq(15)
+ end
+end
diff --git a/examples/spec/integration/dynamic_workflow_spec.rb b/examples/spec/integration/dynamic_workflow_spec.rb
new file mode 100644
index 00000000..39cff9c7
--- /dev/null
+++ b/examples/spec/integration/dynamic_workflow_spec.rb
@@ -0,0 +1,37 @@
+require 'workflows/delegator_workflow'
+
+describe 'Dynamic workflows' do
+ let(:workflow_id) { SecureRandom.uuid }
+
+ it 'can delegate to other classes' do
+ # PlusExecutor and TimesExecutor do not subclass Workflow
+ run_id = Temporal.start_workflow(
+ PlusExecutor,
+ {a: 5, b: 3},
+ options: {
+ workflow_id: workflow_id
+ })
+
+ result = Temporal.await_workflow_result(
+ PlusExecutor,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+ expect(result[:computation]).to eq(8)
+
+ run_id = Temporal.start_workflow(
+ TimesExecutor,
+ {a: 5, b: 3},
+ options: {
+ workflow_id: workflow_id
+ })
+
+ result = Temporal.await_workflow_result(
+ TimesExecutor,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+ expect(result[:computation]).to eq(15)
+
+ end
+end
diff --git a/examples/spec/integration/handling_structured_error_workflow_spec.rb b/examples/spec/integration/handling_structured_error_workflow_spec.rb
new file mode 100644
index 00000000..91096453
--- /dev/null
+++ b/examples/spec/integration/handling_structured_error_workflow_spec.rb
@@ -0,0 +1,31 @@
+require 'workflows/handling_structured_error_workflow'
+
+describe HandlingStructuredErrorWorkflow, :integration do
+ # This test should be run when a worker with USE_ERROR_SERIALIZATION_V2 is running.
+ # That worker runs a task queue, error_serialization_v2. This setup code will
+ # route workflow requests to that task queue.
+ around(:each) do |example|
+ Temporal.configure do |config|
+ config.task_queue = 'error_serialization_v2'
+ end
+
+ example.run
+ ensure
+ Temporal.configure do |config|
+ config.task_queue = integration_spec_task_queue
+ end
+ end
+
+ it 'correctly re-raises an activity-thrown exception in the workflow' do
+ workflow_id = SecureRandom.uuid
+
+ Temporal.start_workflow(described_class, 'foo', 5.0, options: { workflow_id: workflow_id })
+ begin
+ result = Temporal.await_workflow_result(described_class, workflow_id: workflow_id)
+ expect(result).to eq('successfully handled error')
+ rescue Temporal::ActivityException
+ raise "Error deserialization failed. You probably need to run USE_ERROR_SERIALIZATION_V2=1 ./bin/worker and try again."
+ end
+ end
+
+end
diff --git a/examples/spec/integration/initial_search_attributes_spec.rb b/examples/spec/integration/initial_search_attributes_spec.rb
new file mode 100644
index 00000000..7ed26eec
--- /dev/null
+++ b/examples/spec/integration/initial_search_attributes_spec.rb
@@ -0,0 +1,65 @@
+require 'workflows/upsert_search_attributes_workflow'
+require 'time'
+
+describe 'starting workflow with initial search attributes', :integration do
+ it 'has attributes appear in final execution info, but can get overriden by upserting' do
+ workflow_id = 'initial_search_attributes_test_wf-' + SecureRandom.uuid
+ expected_binary_checksum = `git show HEAD -s --format=%H`.strip
+
+ initial_search_attributes = {
+ 'CustomBoolField' => false,
+ 'CustomIntField' => -1,
+ 'CustomDatetimeField' => Time.now,
+
+      # These should get overridden when the workflow upserts them
+ 'CustomStringField' => 'meow',
+ 'CustomDoubleField' => 6.28,
+ }
+ # Override some of the initial search attributes by upserting them during the workflow execution.
+ upserted_search_attributes = {
+ 'CustomStringField' => 'moo',
+ 'CustomDoubleField' => 3.14,
+ }
+ expected_custom_attributes = initial_search_attributes.merge(upserted_search_attributes)
+ # Datetime fields get converted to the Time#iso8601 format, in UTC
+ expected_custom_attributes['CustomDatetimeField'] = expected_custom_attributes['CustomDatetimeField'].utc.iso8601
+
+ run_id = Temporal.start_workflow(
+ UpsertSearchAttributesWorkflow,
+ string_value: upserted_search_attributes['CustomStringField'],
+ float_value: upserted_search_attributes['CustomDoubleField'],
+ # Don't upsert anything for the bool, int, or time search attributes;
+ # their values should be the initial ones set when first starting the workflow.
+ bool_value: nil,
+ int_value: nil,
+ time_value: nil,
+ options: {
+ workflow_id: workflow_id,
+ search_attributes: initial_search_attributes,
+ },
+ )
+
+ # UpsertSearchAttributesWorkflow returns the search attributes it upserted during its execution
+ attributes_at_end = Temporal.await_workflow_result(
+ UpsertSearchAttributesWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+ expect(attributes_at_end).to eq(expected_custom_attributes)
+
+ # These attributes are set for the worker in bin/worker
+ expected_attributes = {
+ # Contains a list of all binary checksums seen for this workflow execution
+ 'BinaryChecksums' => [expected_binary_checksum]
+ }.merge(expected_custom_attributes)
+
+ execution_info = Temporal.fetch_workflow_execution_info(
+ integration_spec_namespace,
+ workflow_id,
+ nil
+ )
+ # Temporal might add new built-in search attributes, so just assert that
+ # the expected attributes are a subset of the actual attributes:
+ expect(execution_info.search_attributes).to be >= expected_attributes
+ end
+end
diff --git a/examples/spec/integration/list_namespaces_spec.rb b/examples/spec/integration/list_namespaces_spec.rb
new file mode 100644
index 00000000..974e882d
--- /dev/null
+++ b/examples/spec/integration/list_namespaces_spec.rb
@@ -0,0 +1,6 @@
+describe 'Temporal.list_namespaces', :integration do
+ it 'returns the correct values' do
+ result = Temporal.list_namespaces(page_size: 100)
+ expect(result).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::ListNamespacesResponse)
+ end
+end
diff --git a/examples/spec/integration/list_schedules_spec.rb b/examples/spec/integration/list_schedules_spec.rb
new file mode 100644
index 00000000..abd6b862
--- /dev/null
+++ b/examples/spec/integration/list_schedules_spec.rb
@@ -0,0 +1,109 @@
+require "timeout"
+require "temporal/errors"
+require "temporal/schedule/backfill"
+require "temporal/schedule/calendar"
+require "temporal/schedule/interval"
+require "temporal/schedule/schedule"
+require "temporal/schedule/schedule_spec"
+require "temporal/schedule/schedule_policies"
+require "temporal/schedule/schedule_state"
+require "temporal/schedule/start_workflow_action"
+
+describe "Temporal.list_schedules", :integration do
+ let(:example_schedule) do
+ workflow_id = SecureRandom.uuid
+ Temporal::Schedule::Schedule.new(
+ spec: Temporal::Schedule::ScheduleSpec.new(
+ cron_expressions: ["@hourly"],
+ # Set an end time so that the test schedule doesn't run forever
+ end_time: Time.now + 600
+ ),
+ action: Temporal::Schedule::StartWorkflowAction.new(
+ "HelloWorldWorkflow",
+ "Test",
+ options: {
+ task_queue: integration_spec_task_queue
+ }
+ )
+ )
+ end
+
+ def cleanup
+ namespace = integration_spec_namespace
+ loop do
+ resp = Temporal.list_schedules(namespace, maximum_page_size: 1000)
+ resp.schedules.each do |schedule|
+ begin
+ Temporal.delete_schedule(namespace, schedule.schedule_id)
+ rescue Temporal::NotFoundFailure
+ # This sometimes throws if a schedule has already been 'completed' (end time is reached)
+ end
+ end
+ break if resp.next_page_token == ""
+ end
+ end
+
+ before do
+ cleanup
+ end
+
+
+ it "can list schedules with pagination" do
+ namespace = integration_spec_namespace
+
+ 10.times do
+ schedule_id = SecureRandom.uuid
+ Temporal.create_schedule(namespace, schedule_id, example_schedule)
+ end
+
+ # list_schedules is eventually consistent. Wait until at least 10 schedules are returned
+ Timeout.timeout(10) do
+ loop do
+ result = Temporal.list_schedules(namespace, maximum_page_size: 100)
+
+ break if result && result.schedules.count >= 10
+
+ sleep(0.5)
+ end
+ end
+
+ page_one = Temporal.list_schedules(namespace, maximum_page_size: 2)
+ expect(page_one.schedules.count).to(eq(2))
+ page_two = Temporal.list_schedules(namespace, next_page_token: page_one.next_page_token, maximum_page_size: 8)
+ expect(page_two.schedules.count).to(eq(8))
+
+    # ensure that we got different schedules in each page
+ page_two_schedule_ids = page_two.schedules.map(&:schedule_id)
+ page_one.schedules.each do |schedule|
+ expect(page_two_schedule_ids).not_to(include(schedule.schedule_id))
+ end
+ end
+
+ it "roundtrip encodes/decodes memo with payload" do
+ namespace = integration_spec_namespace
+ schedule_id = "schedule_with_encoded_memo_payload-#{SecureRandom.uuid}}"
+ Temporal.create_schedule(
+ namespace,
+ schedule_id,
+ example_schedule,
+ memo: {"schedule_memo" => "schedule memo value"}
+ )
+
+ resp = nil
+ matching_schedule = nil
+
+ # list_schedules is eventually consistent. Wait until our created schedule is returned
+ Timeout.timeout(10) do
+ loop do
+ resp = Temporal.list_schedules(namespace, maximum_page_size: 1000)
+
+ matching_schedule = resp.schedules.find { |s| s.schedule_id == schedule_id }
+ break unless matching_schedule.nil?
+
+ sleep(0.1)
+ end
+ end
+
+ expect(matching_schedule.memo).to(eq({"schedule_memo" => "schedule memo value"}))
+ end
+end
diff --git a/examples/spec/integration/metadata_workflow_spec.rb b/examples/spec/integration/metadata_workflow_spec.rb
new file mode 100644
index 00000000..2fd0b1e6
--- /dev/null
+++ b/examples/spec/integration/metadata_workflow_spec.rb
@@ -0,0 +1,91 @@
+require 'workflows/metadata_workflow'
+
+describe MetadataWorkflow, :integration do
+ subject { described_class }
+
+ it 'gets task queue from running workflow' do
+ workflow_id = 'task-queue-' + SecureRandom.uuid
+ run_id = Temporal.start_workflow(
+ subject,
+ options: { workflow_id: workflow_id }
+ )
+
+ actual_result = Temporal.await_workflow_result(
+ subject,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+
+ expect(actual_result.task_queue).to eq(integration_spec_task_queue)
+ end
+
+ it 'workflow can retrieve its headers' do
+ workflow_id = 'header_test_wf-' + SecureRandom.uuid
+
+ run_id = Temporal.start_workflow(
+ MetadataWorkflow,
+ options: {
+ workflow_id: workflow_id,
+ headers: { 'foo' => 'bar' },
+ }
+ )
+
+ actual_result = Temporal.await_workflow_result(
+ MetadataWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+ expect(actual_result.headers).to eq({ 'foo' => 'bar' })
+ end
+
+ it 'workflow can retrieve its run started at' do
+ workflow_id = 'started_at_test_wf-' + SecureRandom.uuid
+
+ run_id = Temporal.start_workflow(
+ MetadataWorkflow,
+ options: { workflow_id: workflow_id }
+ )
+
+ actual_result = Temporal.await_workflow_result(
+ MetadataWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+ expect(Time.now - actual_result.run_started_at).to be_between(0, 30)
+ end
+
+ it 'gets memo from workflow execution info' do
+ workflow_id = 'memo_execution_test_wf-' + SecureRandom.uuid
+ run_id = Temporal.start_workflow(subject, options: { workflow_id: workflow_id, memo: { 'foo' => 'bar' } })
+
+ actual_result = Temporal.await_workflow_result(
+ subject,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+ expect(actual_result.memo['foo']).to eq('bar')
+
+ expect(Temporal.fetch_workflow_execution_info(
+ integration_spec_namespace, workflow_id, nil
+ ).memo).to eq({ 'foo' => 'bar' })
+ end
+
+ it 'gets memo from workflow context with no memo' do
+ workflow_id = 'memo_context_no_memo_test_wf-' + SecureRandom.uuid
+
+ run_id = Temporal.start_workflow(
+ subject,
+ options: { workflow_id: workflow_id }
+ )
+
+ actual_result = Temporal.await_workflow_result(
+ subject,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+ expect(actual_result.memo).to eq({})
+ expect(Temporal.fetch_workflow_execution_info(
+ integration_spec_namespace, workflow_id, nil
+ ).memo).to eq({})
+ end
+end
diff --git a/examples/spec/integration/named_signal_handler_spec.rb b/examples/spec/integration/named_signal_handler_spec.rb
new file mode 100644
index 00000000..aa5fc559
--- /dev/null
+++ b/examples/spec/integration/named_signal_handler_spec.rb
@@ -0,0 +1,84 @@
+require 'workflows/wait_for_named_signal_workflow'
+
+describe WaitForNamedSignalWorkflow, :integration do
+ let(:receiver_workflow_id) { SecureRandom.uuid }
+
+ context 'when the signal is named' do
+ let(:arg1) { "arg1" }
+ let(:arg2) { 7890.1234 }
+
+ context 'and the workflow has a named signal handler matching the signal name' do
+ let(:signal_name) { "NamedSignal" }
+
+ it 'receives the signal in its named handler' do
+ _, run_id = run_workflow(WaitForNamedSignalWorkflow, signal_name, options: { workflow_id: receiver_workflow_id})
+
+ Temporal.signal_workflow(WaitForNamedSignalWorkflow, signal_name, receiver_workflow_id, run_id, [arg1, arg2])
+
+ result = Temporal.await_workflow_result(
+ WaitForNamedSignalWorkflow,
+ workflow_id: receiver_workflow_id,
+ run_id: run_id,
+ )
+
+ expect(result[:received]).to include({signal_name => [arg1, arg2]})
+ expect(result[:counts]).to include({signal_name => 1})
+ expect(result).to eq(
+ {
+ received: {
+ signal_name => [arg1, arg2],
+ 'catch-all' => [arg1, arg2]
+ },
+ counts: {
+ signal_name => 1,
+ 'catch-all' => 1
+ }
+ }
+ )
+
+ end
+
+ it 'receives the signal in its catch-all signal handler' do
+ _, run_id = run_workflow(WaitForNamedSignalWorkflow, signal_name, options: { workflow_id: receiver_workflow_id})
+
+ Temporal.signal_workflow(WaitForNamedSignalWorkflow, signal_name, receiver_workflow_id, run_id, [arg1, arg2])
+
+ result = Temporal.await_workflow_result(
+ WaitForNamedSignalWorkflow,
+ workflow_id: receiver_workflow_id,
+ run_id: run_id,
+ )
+
+ expect(result[:received]).to include({"catch-all" => [arg1, arg2]})
+ expect(result[:counts]).to include({"catch-all" => 1})
+ end
+ end
+
+ context 'and the workflow does NOT have a named signal handler matching the signal name' do
+ let(:signal_name) { 'doesNOTmatchAsignalHandler' }
+
+ it 'receives the signal in its catch-all signal handler' do
+ _, run_id = run_workflow(WaitForNamedSignalWorkflow, signal_name, options: { workflow_id: receiver_workflow_id})
+
+ Temporal.signal_workflow(WaitForNamedSignalWorkflow, signal_name, receiver_workflow_id, run_id, [arg1, arg2])
+
+ result = Temporal.await_workflow_result(
+ WaitForNamedSignalWorkflow,
+ workflow_id: receiver_workflow_id,
+ run_id: run_id,
+ )
+
+ expect(result).to eq(
+ {
+ received: {
+ 'catch-all' => [arg1, arg2]
+ },
+ counts: {
+ 'catch-all' => 1
+ }
+ }
+ )
+ end
+ end
+ end
+end
diff --git a/examples/spec/integration/parent_close_workflow_spec.rb b/examples/spec/integration/parent_close_workflow_spec.rb
new file mode 100644
index 00000000..44f9348f
--- /dev/null
+++ b/examples/spec/integration/parent_close_workflow_spec.rb
@@ -0,0 +1,55 @@
+require 'workflows/parent_close_workflow'
+
+describe ParentCloseWorkflow, :integration do
+ subject { described_class }
+
+ it 'SlowChildWorkflow terminates if parent_close_policy is TERMINATE' do
+ workflow_id = 'parent_close_test_wf-' + SecureRandom.uuid
+ child_workflow_id = 'slow_child_test_wf-' + SecureRandom.uuid
+
+ run_id = Temporal.start_workflow(
+ ParentCloseWorkflow,
+ child_workflow_id,
+ :terminate,
+ options: { workflow_id: workflow_id }
+ )
+
+ Temporal.await_workflow_result(
+ ParentCloseWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+
+ expect do
+ Temporal.await_workflow_result(
+ SlowChildWorkflow,
+ workflow_id: child_workflow_id,
+ )
+ end.to raise_error(Temporal::WorkflowTerminated)
+ end
+
+ it 'SlowChildWorkflow completes if parent_close_policy is ABANDON' do
+ workflow_id = 'parent_close_test_wf-' + SecureRandom.uuid
+ child_workflow_id = 'slow_child_test_wf-' + SecureRandom.uuid
+
+ run_id = Temporal.start_workflow(
+ ParentCloseWorkflow,
+ child_workflow_id,
+ :abandon,
+ options: { workflow_id: workflow_id }
+ )
+
+ Temporal.await_workflow_result(
+ ParentCloseWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+
+ result = Temporal.await_workflow_result(
+ SlowChildWorkflow,
+ workflow_id: child_workflow_id,
+ )
+
+ expect(result).to eq({ parent_workflow_id: workflow_id })
+ end
+end
diff --git a/examples/spec/integration/parent_id_reuse_spec.rb b/examples/spec/integration/parent_id_reuse_spec.rb
new file mode 100644
index 00000000..e09c8790
--- /dev/null
+++ b/examples/spec/integration/parent_id_reuse_spec.rb
@@ -0,0 +1,109 @@
+require 'workflows/parent_id_reuse_workflow'
+
+describe ParentIdReuseWorkflow, :integration do
+ subject { described_class }
+
+ it 'with :allow, allows duplicates' do
+ workflow_id = 'parent_id_reuse_wf-' + SecureRandom.uuid
+ child_workflow_id = 'child_id_reuse_wf-' + SecureRandom.uuid
+
+ Temporal.start_workflow(
+ ParentIdReuseWorkflow,
+ child_workflow_id,
+ child_workflow_id,
+ false,
+ :allow,
+ options: { workflow_id: workflow_id }
+ )
+
+ Temporal.await_workflow_result(
+ ParentIdReuseWorkflow,
+ workflow_id: workflow_id,
+ )
+ end
+
+ it 'with :reject, rejects duplicates' do
+ workflow_id = 'parent_id_reuse_wf-' + SecureRandom.uuid
+ child_workflow_id = 'child_id_reuse_wf-' + SecureRandom.uuid
+
+ Temporal.start_workflow(
+ ParentIdReuseWorkflow,
+ child_workflow_id,
+ child_workflow_id,
+ false,
+ :reject,
+ options: { workflow_id: workflow_id }
+ )
+
+ expect do
+ Temporal.await_workflow_result(
+ ParentIdReuseWorkflow,
+ workflow_id: workflow_id,
+ )
+ end.to raise_error(Temporal::WorkflowExecutionAlreadyStartedFailure,
+ "The child workflow could not be started - per its workflow_id_reuse_policy, it conflicts with another workflow with the same id: #{child_workflow_id}"
+ )
+ end
+
+ it 'with :reject, does not reject non-duplicates' do
+ workflow_id = 'parent_id_reuse_wf-' + SecureRandom.uuid
+ child_workflow_id_1 = 'child_id_reuse_wf-' + SecureRandom.uuid
+ child_workflow_id_2 = 'child_id_reuse_wf-' + SecureRandom.uuid
+
+ Temporal.start_workflow(
+ ParentIdReuseWorkflow,
+ child_workflow_id_1,
+ child_workflow_id_2,
+ false,
+ :reject,
+ options: { workflow_id: workflow_id }
+ )
+
+ Temporal.await_workflow_result(
+ ParentIdReuseWorkflow,
+ workflow_id: workflow_id,
+ )
+ end
+
+ it 'with :allow_failed, allows duplicates after failure' do
+ workflow_id = 'parent_id_reuse_wf-' + SecureRandom.uuid
+ child_workflow_id = 'child_id_reuse_wf-' + SecureRandom.uuid
+
+ Temporal.start_workflow(
+ ParentIdReuseWorkflow,
+ child_workflow_id,
+ child_workflow_id,
+ true,
+ :allow_failed,
+ options: { workflow_id: workflow_id }
+ )
+
+ Temporal.await_workflow_result(
+ ParentIdReuseWorkflow,
+ workflow_id: workflow_id,
+ )
+ end
+
+ it 'with :allow_failed, rejects duplicates after success' do
+ workflow_id = 'parent_id_reuse_wf-' + SecureRandom.uuid
+ child_workflow_id = 'child_id_reuse_wf-' + SecureRandom.uuid
+
+ Temporal.start_workflow(
+ ParentIdReuseWorkflow,
+ child_workflow_id,
+ child_workflow_id,
+ false,
+ :allow_failed,
+ options: { workflow_id: workflow_id }
+ )
+
+ expect do
+ Temporal.await_workflow_result(
+ ParentIdReuseWorkflow,
+ workflow_id: workflow_id,
+ )
+ end.to raise_error(Temporal::WorkflowExecutionAlreadyStartedFailure,
+ "The child workflow could not be started - per its workflow_id_reuse_policy, it conflicts with another workflow with the same id: #{child_workflow_id}"
+ )
+ end
+end
diff --git a/examples/spec/integration/pause_schedule_spec.rb b/examples/spec/integration/pause_schedule_spec.rb
new file mode 100644
index 00000000..46e8b8ce
--- /dev/null
+++ b/examples/spec/integration/pause_schedule_spec.rb
@@ -0,0 +1,44 @@
+require "temporal/schedule/schedule"
+require "temporal/schedule/calendar"
+require "temporal/schedule/schedule_spec"
+require "temporal/schedule/schedule_policies"
+require "temporal/schedule/schedule_state"
+require "temporal/schedule/start_workflow_action"
+
+describe "Temporal.pause_schedule", :integration do
+ let(:example_schedule) do
+ Temporal::Schedule::Schedule.new(
+ spec: Temporal::Schedule::ScheduleSpec.new(
+ cron_expressions: ["@hourly"],
+ # Set an end time so that the test schedule doesn't run forever
+ end_time: Time.now + 600
+ ),
+ action: Temporal::Schedule::StartWorkflowAction.new(
+ "HelloWorldWorkflow",
+ "Test",
+ options: {
+ task_queue: integration_spec_task_queue
+ }
+ )
+ )
+ end
+
+ it "can pause and unpause a schedule" do
+ namespace = integration_spec_namespace
+ schedule_id = SecureRandom.uuid
+
+ Temporal.create_schedule(namespace, schedule_id, example_schedule)
+ describe_response = Temporal.describe_schedule(namespace, schedule_id)
+ expect(describe_response.schedule.state.paused).to(eq(false))
+
+ Temporal.pause_schedule(namespace, schedule_id)
+
+ describe_response = Temporal.describe_schedule(namespace, schedule_id)
+ expect(describe_response.schedule.state.paused).to(eq(true))
+
+ Temporal.unpause_schedule(namespace, schedule_id)
+
+ describe_response = Temporal.describe_schedule(namespace, schedule_id)
+ expect(describe_response.schedule.state.paused).to(eq(false))
+ end
+end
diff --git a/examples/spec/integration/query_workflow_spec.rb b/examples/spec/integration/query_workflow_spec.rb
new file mode 100644
index 00000000..fb54b0d9
--- /dev/null
+++ b/examples/spec/integration/query_workflow_spec.rb
@@ -0,0 +1,59 @@
+require 'workflows/query_workflow'
+require 'temporal/errors'
+
+describe QueryWorkflow, :integration do
+ subject { described_class }
+
+ it 'returns the correct result for the queries' do
+ workflow_id, run_id = run_workflow(described_class)
+
+ # Query with nil workflow class
+ expect(Temporal.query_workflow(nil, 'state', workflow_id, run_id))
+ .to eq 'started'
+
+ # Query with arbitrary args
+ expect(Temporal.query_workflow(described_class, 'state', workflow_id, run_id,
+ 'upcase', 'ignored', 'reverse'))
+ .to eq 'DETRATS'
+
+ # Query with no args
+ expect(Temporal.query_workflow(described_class, 'signal_count', workflow_id, run_id))
+ .to eq 0
+
+ # Query with unregistered handler
+ expect { Temporal.query_workflow(described_class, 'unknown_query', workflow_id, run_id) }
+ .to raise_error(Temporal::QueryFailed, "Workflow did not register a handler for 'unknown_query'. KnownQueryTypes=[__stack_trace, state, signal_count]")
+
+ # Query built-in stack trace handler, looking for a couple of key parts of the contents
+ stack_trace = Temporal.query_workflow(described_class, '__stack_trace', workflow_id, run_id)
+ expect(stack_trace).to start_with "Fiber count: 1\n\n"
+ expect(stack_trace).to include "/examples/workflows/query_workflow.rb:"
+
+ Temporal.signal_workflow(described_class, 'make_progress', workflow_id, run_id)
+
+ # Query for updated signal_count with an unsatisfied reject condition
+ expect(Temporal.query_workflow(described_class, 'signal_count', workflow_id, run_id, query_reject_condition: :not_open))
+ .to eq 1
+
+ Temporal.signal_workflow(described_class, 'finish', workflow_id, run_id)
+ wait_for_workflow_completion(workflow_id, run_id)
+
+ # Repeating original query scenarios above, expecting updated state and signal results
+ expect(Temporal.query_workflow(nil, 'state', workflow_id, run_id))
+ .to eq 'finished'
+
+ expect(Temporal.query_workflow(described_class, 'state', workflow_id, run_id,
+ 'upcase', 'ignored', 'reverse'))
+ .to eq 'DEHSINIF'
+
+ expect(Temporal.query_workflow(described_class, 'signal_count', workflow_id, run_id))
+ .to eq 2
+
+ expect { Temporal.query_workflow(described_class, 'unknown_query', workflow_id, run_id) }
+ .to raise_error(Temporal::QueryFailed, "Workflow did not register a handler for 'unknown_query'. KnownQueryTypes=[__stack_trace, state, signal_count]")
+
+ # Now that the workflow is completed, test a query with a reject condition satisfied
+ expect { Temporal.query_workflow(described_class, 'state', workflow_id, run_id, query_reject_condition: :not_open) }
+ .to raise_error(Temporal::QueryFailed, 'Query rejected: status WORKFLOW_EXECUTION_STATUS_COMPLETED')
+ end
+end
diff --git a/examples/spec/integration/register_namespace_spec.rb b/examples/spec/integration/register_namespace_spec.rb
new file mode 100644
index 00000000..5380895b
--- /dev/null
+++ b/examples/spec/integration/register_namespace_spec.rb
@@ -0,0 +1,36 @@
+describe 'Temporal.register_namespace' do
+ it 'can register a new namespace' do
+ # have to generate a new namespace on each run because currently can't delete namespaces
+ name = "test_namespace_#{SecureRandom.uuid}"
+ description = 'this is the description'
+ retention_period = 30
+ data = { test: 'value' }
+
+ Temporal.register_namespace(name, description, retention_period: retention_period, data: data)
+
+ # fetch the namespace from Temporal and check it exists and has the correct settings
+ # (need to wait a few seconds for temporal to catch up so try a few times)
+ attempts = 0
+ while attempts < 30 do
+ attempts += 1
+
+ begin
+ result = Temporal.describe_namespace(name)
+
+ expect(result.namespace_info.name).to eq(name)
+ expect(result.namespace_info.data).to eq(data)
+ expect(result.config.workflow_execution_retention_ttl.seconds).to eq(retention_period * 24 * 60 * 60)
+ break
+ rescue GRPC::NotFound
+ sleep 0.5
+ end
+ end
+ end
+
+ it 'errors if attempting to register a namespace with the same name' do
+ name = "test_namespace_#{SecureRandom.uuid}"
+ Temporal.register_namespace(name)
+
+ expect {Temporal.register_namespace(name)}.to raise_error(Temporal::NamespaceAlreadyExistsFailure, 'Namespace already exists.')
+ end
+end
diff --git a/examples/spec/integration/reset_workflow_spec.rb b/examples/spec/integration/reset_workflow_spec.rb
new file mode 100644
index 00000000..7305fae4
--- /dev/null
+++ b/examples/spec/integration/reset_workflow_spec.rb
@@ -0,0 +1,163 @@
+require 'workflows/hello_world_workflow'
+require 'workflows/query_workflow'
+require 'temporal/reset_reapply_type'
+
+describe 'Temporal.reset_workflow', :integration do
+ it 'can reset a closed workflow to the beginning' do
+ workflow_id = SecureRandom.uuid
+ original_run_id = Temporal.start_workflow(
+ HelloWorldWorkflow,
+ 'Test',
+ options: { workflow_id: workflow_id }
+ )
+
+ original_result = Temporal.await_workflow_result(
+ HelloWorldWorkflow,
+ workflow_id: workflow_id,
+ run_id: original_run_id
+ )
+ expect(original_result).to eq('Hello World, Test')
+
+ new_run_id = Temporal.reset_workflow(
+ integration_spec_namespace,
+ workflow_id,
+ original_run_id,
+ strategy: Temporal::ResetStrategy::FIRST_WORKFLOW_TASK
+ )
+
+ new_result = Temporal.await_workflow_result(
+ HelloWorldWorkflow,
+ workflow_id: workflow_id,
+ run_id: new_run_id,
+ )
+ expect(new_result).to eq('Hello World, Test')
+ end
+
+ def reset_hello_world_workflow_twice(workflow_id, original_run_id, request_id:)
+ 2.times.map do
+ new_run_id = Temporal.reset_workflow(
+ integration_spec_namespace,
+ workflow_id,
+ original_run_id,
+ strategy: Temporal::ResetStrategy::FIRST_WORKFLOW_TASK,
+ request_id: request_id
+ )
+
+ new_result = Temporal.await_workflow_result(
+ HelloWorldWorkflow,
+ workflow_id: workflow_id,
+ run_id: new_run_id,
+ )
+ expect(new_result).to eq('Hello World, Test')
+
+ new_run_id
+ end
+ end
+
+ it 'can repeatedly reset the same closed workflow to the beginning' do
+ workflow_id = SecureRandom.uuid
+ original_run_id = Temporal.start_workflow(
+ HelloWorldWorkflow,
+ 'Test',
+ options: { workflow_id: workflow_id }
+ )
+
+ original_result = Temporal.await_workflow_result(
+ HelloWorldWorkflow,
+ workflow_id: workflow_id,
+ run_id: original_run_id,
+ )
+ expect(original_result).to eq('Hello World, Test')
+
+ new_run_ids = reset_hello_world_workflow_twice(
+ workflow_id,
+ original_run_id,
+ # This causes the request_id to be generated with a random value:
+ request_id: nil
+ )
+
+ # Each Reset request should have resulted in a unique workflow execution
+ expect(new_run_ids.uniq.size).to eq(new_run_ids.size)
+ end
+
+ it 'can deduplicate reset requests' do
+ workflow_id = SecureRandom.uuid
+ original_run_id = Temporal.start_workflow(
+ HelloWorldWorkflow,
+ 'Test',
+ options: { workflow_id: workflow_id }
+ )
+
+ original_result = Temporal.await_workflow_result(
+ HelloWorldWorkflow,
+ workflow_id: workflow_id,
+ run_id: original_run_id,
+ )
+ expect(original_result).to eq('Hello World, Test')
+
+ reset_request_id = SecureRandom.uuid
+ new_run_ids = reset_hello_world_workflow_twice(
+ workflow_id,
+ original_run_id,
+ request_id: reset_request_id
+ )
+
+ # Each Reset request except the first should have been deduplicated
+ expect(new_run_ids.uniq.size).to eq(1)
+ end
+
+ def start_query_workflow_and_signal_three_times
+ workflow_id = SecureRandom.uuid
+ run_id = Temporal.start_workflow(
+ QueryWorkflow,
+ options: { workflow_id: workflow_id }
+ )
+
+ expect(Temporal.query_workflow(QueryWorkflow, 'signal_count', workflow_id, run_id))
+ .to eq 0
+
+ Temporal.signal_workflow(QueryWorkflow, 'make_progress', workflow_id, run_id)
+ Temporal.signal_workflow(QueryWorkflow, 'make_progress', workflow_id, run_id)
+ Temporal.signal_workflow(QueryWorkflow, 'make_progress', workflow_id, run_id)
+
+ expect(Temporal.query_workflow(QueryWorkflow, 'signal_count', workflow_id, run_id))
+ .to eq 3
+
+ { workflow_id: workflow_id, run_id: run_id }
+ end
+
+ it 'can reapply signals when resetting a workflow' do
+ workflow_id, original_run_id = start_query_workflow_and_signal_three_times.values_at(:workflow_id, :run_id)
+
+ new_run_id = Temporal.reset_workflow(
+ integration_spec_namespace,
+ workflow_id,
+ original_run_id,
+ strategy: Temporal::ResetStrategy::FIRST_WORKFLOW_TASK,
+ reset_reapply_type: Temporal::ResetReapplyType::SIGNAL
+ )
+
+ expect(Temporal.query_workflow(QueryWorkflow, 'signal_count', workflow_id, new_run_id))
+ .to eq 3
+
+ Temporal.terminate_workflow(workflow_id, run_id: new_run_id)
+ end
+
+ it 'can skip reapplying signals when resetting a workflow' do
+ workflow_id, original_run_id = start_query_workflow_and_signal_three_times.values_at(:workflow_id, :run_id)
+
+ new_run_id = Temporal.reset_workflow(
+ integration_spec_namespace,
+ workflow_id,
+ original_run_id,
+ strategy: Temporal::ResetStrategy::FIRST_WORKFLOW_TASK,
+ reset_reapply_type: Temporal::ResetReapplyType::NONE
+ )
+
+ expect(Temporal.query_workflow(QueryWorkflow, 'signal_count', workflow_id, new_run_id))
+ .to eq 0
+
+ Temporal.terminate_workflow(workflow_id, run_id: new_run_id)
+ end
+end
+
diff --git a/examples/spec/integration/schedule_child_workflow_spec.rb b/examples/spec/integration/schedule_child_workflow_spec.rb
new file mode 100644
index 00000000..a49f918a
--- /dev/null
+++ b/examples/spec/integration/schedule_child_workflow_spec.rb
@@ -0,0 +1,37 @@
+require 'workflows/schedule_child_workflow'
+require 'workflows/hello_world_workflow'
+
+describe ScheduleChildWorkflow, :integration do
+ let(:cron_schedule) { "*/6 * * * *" }
+
+ it 'schedules a child workflow with a given cron schedule' do
+ child_workflow_id = 'schedule_child_test_wf-' + SecureRandom.uuid
+ workflow_id, run_id = run_workflow(
+ described_class,
+ child_workflow_id,
+ cron_schedule,
+ options: {
+ timeouts: { execution: 10 }
+ }
+ )
+
+ wait_for_workflow_completion(workflow_id, run_id)
+ parent_history = fetch_history(workflow_id, run_id)
+
+ child_workflow_event = parent_history.history.events.detect do |event|
+ event.event_type == :EVENT_TYPE_START_CHILD_WORKFLOW_EXECUTION_INITIATED
+ end
+ expect(
+ child_workflow_event.start_child_workflow_execution_initiated_event_attributes.cron_schedule
+ ).to eq(cron_schedule)
+
+ # Expecting the child workflow to terminate as a result of the parent close policy
+ expect do
+ Temporal.await_workflow_result(
+ HelloWorldWorkflow,
+ workflow_id: child_workflow_id
+ )
+ end.to raise_error(Temporal::WorkflowTerminated)
+
+ end
+end
diff --git a/examples/spec/integration/search_attributes_spec.rb b/examples/spec/integration/search_attributes_spec.rb
new file mode 100644
index 00000000..b9f67da1
--- /dev/null
+++ b/examples/spec/integration/search_attributes_spec.rb
@@ -0,0 +1,70 @@
+require 'temporal/errors'
+
+describe 'search attributes' do
+ let(:attribute_1) { 'Age' }
+ let(:attribute_2) { 'Name' }
+
+ def cleanup
+ custom_attributes = Temporal.list_custom_search_attributes
+ custom_attributes.keys.intersection([attribute_1, attribute_2]).each do |attribute|
+ Temporal.remove_custom_search_attributes(attribute)
+ end
+ end
+
+ before do
+ cleanup
+ end
+
+ after do
+ cleanup
+ end
+
+ # Depending on the visibility storage backend of the server, recreating a search attribute
+ # is either ignored so long as the type is the same (Elasticsearch) or it raises
+ # an error (SQL). This function ensures consistent state upon exit.
+ def safe_add(attributes)
+ begin
+ Temporal.add_custom_search_attributes(attributes)
+ rescue => e
+ # This won't always throw but when it does it needs to be of this type
+ expect(e).to be_instance_of(Temporal::SearchAttributeAlreadyExistsFailure)
+ end
+ end
+
+ it 'add' do
+ safe_add({ attribute_1 => :int, attribute_2 => :keyword })
+
+ custom_attributes = Temporal.list_custom_search_attributes
+ expect(custom_attributes).to include(attribute_1 => :int)
+ expect(custom_attributes).to include(attribute_2 => :keyword)
+ end
+
+ it 'add duplicate fails' do
+ safe_add({ attribute_1 => :int })
+
+ # This, however, will always throw
+ expect do
+ Temporal.add_custom_search_attributes(
+ {
+ attribute_1 => :int
+ }
+ )
+ end.to raise_error(Temporal::SearchAttributeAlreadyExistsFailure)
+ end
+
+ it 'remove' do
+ safe_add({ attribute_1 => :int, attribute_2 => :keyword })
+
+ Temporal.remove_custom_search_attributes(attribute_1, attribute_2)
+
+ custom_attributes = Temporal.list_custom_search_attributes
+ expect(custom_attributes).not_to include(attribute_1 => :int)
+ expect(custom_attributes).not_to include(attribute_2 => :keyword)
+ end
+
+ it 'remove non-existent fails' do
+ expect do
+ Temporal.remove_custom_search_attributes(attribute_1, attribute_2)
+ end.to raise_error(Temporal::NotFoundFailure)
+ end
+end
diff --git a/examples/spec/integration/signal_spec.rb b/examples/spec/integration/signal_spec.rb
new file mode 100644
index 00000000..4789219a
--- /dev/null
+++ b/examples/spec/integration/signal_spec.rb
@@ -0,0 +1,40 @@
+require 'securerandom'
+require 'workflows/signal_workflow'
+
+describe 'signal' do
+ it 'all signals process' do
+ workflow_id = SecureRandom.uuid
+ expected_score = 7
+ run_id = Temporal.start_workflow(
+ SignalWorkflow,
+ 1, # seconds
+ options: {
+ workflow_id: workflow_id,
+ signal_name: 'score',
+ signal_input: expected_score,
+ timeouts: { execution: 10 }
+ }
+ )
+
+ loop do
+ value = SecureRandom.rand(10)
+
+ begin
+ Temporal.signal_workflow(SignalWorkflow, 'score', workflow_id, run_id, value)
+ rescue StandardError
+ # Keep going until there's an error such as the workflow finishing
+ break
+ end
+ expected_score += value
+ sleep 0.01
+ end
+
+ result = Temporal.await_workflow_result(
+ SignalWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id
+ )
+
+ expect(result).to eq(expected_score)
+ end
+end
diff --git a/examples/spec/integration/signal_with_start_spec.rb b/examples/spec/integration/signal_with_start_spec.rb
new file mode 100644
index 00000000..2971404c
--- /dev/null
+++ b/examples/spec/integration/signal_with_start_spec.rb
@@ -0,0 +1,75 @@
+require 'workflows/signal_with_start_workflow'
+
+describe 'signal with start' do
+
+ it 'signals at workflow start time' do
+ workflow_id = SecureRandom.uuid
+ run_id = Temporal.start_workflow(
+ SignalWithStartWorkflow,
+ 'signal_name',
+ options: {
+ workflow_id: workflow_id,
+ signal_name: 'signal_name',
+ signal_input: 'expected value',
+ timeouts: { execution: 10 },
+ }
+ )
+
+ result = Temporal.await_workflow_result(
+ SignalWithStartWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+
+ expect(result).to eq('expected value') # the workflow should return the signal value
+ end
+
+ it 'signals at workflow start time with name only' do
+ workflow_id = SecureRandom.uuid
+ run_id = Temporal.start_workflow(
+ SignalWithStartWorkflow,
+ 'signal_name',
+ options: {
+ workflow_id: workflow_id,
+ signal_name: 'signal_name',
+ timeouts: { execution: 10 },
+ }
+ )
+
+ result = Temporal.await_workflow_result(
+ SignalWithStartWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+
+ expect(result).to eq(nil) # no signal_input was provided, so the received signal value is nil
+ end
+
+ it 'does not launch a new workflow when signaling a running workflow through signal_with_start' do
+ workflow_id = SecureRandom.uuid
+ run_id = Temporal.start_workflow(
+ SignalWithStartWorkflow,
+ 'signal_name',
+ options: {
+ workflow_id: workflow_id,
+ signal_name: 'signal_name',
+ signal_input: 'expected value',
+ timeouts: { execution: 10 },
+ }
+ )
+
+ second_run_id = Temporal.start_workflow(
+ SignalWithStartWorkflow,
+ 'signal_name',
+ options: {
+ workflow_id: workflow_id,
+ signal_name: 'signal_name',
+ signal_input: 'expected value',
+ timeouts: { execution: 10 },
+ }
+ )
+
+ # If the run ids are the same, then we didn't start a new workflow
+ expect(second_run_id).to eq(run_id)
+ end
+end
diff --git a/examples/spec/integration/start_child_workflow_workflow_spec.rb b/examples/spec/integration/start_child_workflow_workflow_spec.rb
new file mode 100644
index 00000000..e4e1be9e
--- /dev/null
+++ b/examples/spec/integration/start_child_workflow_workflow_spec.rb
@@ -0,0 +1,24 @@
+require 'workflows/start_child_workflow_workflow'
+
+describe StartChildWorkflowWorkflow, :integration do
+ subject { described_class }
+
+ it 'StartChildWorkflowWorkflow returns the child workflows information on the start future' do
+ workflow_id = 'parent_close_test_wf-' + SecureRandom.uuid
+ child_workflow_id = 'slow_child_test_wf-' + SecureRandom.uuid
+
+ run_id = Temporal.start_workflow(
+ StartChildWorkflowWorkflow,
+ child_workflow_id,
+ options: { workflow_id: workflow_id }
+ )
+
+ result = Temporal.await_workflow_result(
+ StartChildWorkflowWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+
+ expect(result.workflow_id).to start_with(child_workflow_id)
+ end
+end
diff --git a/examples/spec/integration/start_workflow_spec.rb b/examples/spec/integration/start_workflow_spec.rb
new file mode 100644
index 00000000..8cf6a46c
--- /dev/null
+++ b/examples/spec/integration/start_workflow_spec.rb
@@ -0,0 +1,96 @@
+require 'workflows/hello_world_workflow'
+require 'workflows/long_workflow'
+
+describe 'Temporal.start_workflow', :integration do
+ let(:workflow_id) { SecureRandom.uuid }
+
+ it 'starts a workflow using a class reference' do
+ run_id = Temporal.start_workflow(HelloWorldWorkflow, 'Test', options: {
+ workflow_id: workflow_id
+ })
+
+ result = Temporal.await_workflow_result(
+ HelloWorldWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id
+ )
+
+ expect(result).to eq('Hello World, Test')
+ end
+
+ it 'starts a workflow using a string reference' do
+ run_id = Temporal.start_workflow('HelloWorldWorkflow', 'Test', options: {
+ workflow_id: workflow_id,
+ namespace: integration_spec_namespace,
+ task_queue: integration_spec_task_queue
+ })
+
+ result = Temporal.await_workflow_result(
+ 'HelloWorldWorkflow',
+ workflow_id: workflow_id,
+ run_id: run_id,
+ namespace: integration_spec_namespace
+ )
+
+ expect(result).to eq('Hello World, Test')
+ end
+
+ it 'rejects duplicate workflow ids based on workflow_id_reuse_policy' do
+ # Run it once...
+ run_id = Temporal.start_workflow(HelloWorldWorkflow, 'Test', options: {
+ workflow_id: workflow_id,
+ })
+
+ result = Temporal.await_workflow_result(
+ HelloWorldWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id
+ )
+
+ expect(result).to eq('Hello World, Test')
+
+ # And again, allowing duplicates...
+ run_id = Temporal.start_workflow(HelloWorldWorkflow, 'Test', options: {
+ workflow_id: workflow_id,
+ workflow_id_reuse_policy: :allow
+ })
+
+ Temporal.await_workflow_result(
+ HelloWorldWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id
+ )
+
+ # And again, rejecting duplicates...
+ expect do
+ Temporal.start_workflow(HelloWorldWorkflow, 'Test', options: {
+ workflow_id: workflow_id,
+ workflow_id_reuse_policy: :reject
+ })
+ end.to raise_error(Temporal::WorkflowExecutionAlreadyStartedFailure)
+ end
+
+ it 'terminates duplicate workflow ids based on workflow_id_reuse_policy' do
+ run_id_1 = Temporal.start_workflow(LongWorkflow, options: {
+ workflow_id: workflow_id,
+ workflow_id_reuse_policy: :terminate_if_running
+ })
+
+ run_id_2 = Temporal.start_workflow(LongWorkflow, options: {
+ workflow_id: workflow_id,
+ workflow_id_reuse_policy: :terminate_if_running
+ })
+
+ execution_1 = Temporal.fetch_workflow_execution_info(
+ integration_spec_namespace,
+ workflow_id,
+ run_id_1)
+ execution_2 = Temporal.fetch_workflow_execution_info(
+ integration_spec_namespace,
+ workflow_id,
+ run_id_2)
+
+ expect(execution_1.status).to eq(Temporal::Workflow::Status::TERMINATED)
+ expect(execution_2.status).to eq(Temporal::Workflow::Status::RUNNING)
+ end
+end
diff --git a/examples/spec/integration/terminate_workflow_spec.rb b/examples/spec/integration/terminate_workflow_spec.rb
index 4e6137a7..19c88cdd 100644
--- a/examples/spec/integration/terminate_workflow_spec.rb
+++ b/examples/spec/integration/terminate_workflow_spec.rb
@@ -7,7 +7,7 @@
TimeoutWorkflow,
1, # sleep long enough to be sure I can cancel in time.
1,
- { options: { workflow_id: workflow_id } },
+ options: { workflow_id: workflow_id },
)
Temporal.terminate_workflow(workflow_id)
diff --git a/examples/spec/integration/trigger_schedule_spec.rb b/examples/spec/integration/trigger_schedule_spec.rb
new file mode 100644
index 00000000..f90c8f0b
--- /dev/null
+++ b/examples/spec/integration/trigger_schedule_spec.rb
@@ -0,0 +1,49 @@
+require "timeout"
+require "temporal/schedule/schedule"
+require "temporal/schedule/calendar"
+require "temporal/schedule/schedule_spec"
+require "temporal/schedule/schedule_policies"
+require "temporal/schedule/schedule_state"
+require "temporal/schedule/start_workflow_action"
+
+describe "Temporal.trigger_schedule", :integration do
+ let(:example_schedule) do
+ Temporal::Schedule::Schedule.new(
+ spec: Temporal::Schedule::ScheduleSpec.new(
+ # Set this to a date in the future to avoid triggering the schedule immediately
+ calendars: [Temporal::Schedule::Calendar.new(year: "2055", month: "12", day_of_month: "25")]
+ ),
+ action: Temporal::Schedule::StartWorkflowAction.new(
+ "HelloWorldWorkflow",
+ "Test",
+ options: {
+ task_queue: integration_spec_task_queue
+ }
+ )
+ )
+ end
+
+ it "can trigger a schedule to run immediately" do
+ namespace = integration_spec_namespace
+ schedule_id = SecureRandom.uuid
+
+ Temporal.create_schedule(namespace, schedule_id, example_schedule)
+ describe_response = Temporal.describe_schedule(namespace, schedule_id)
+ expect(describe_response.info.recent_actions.size).to(eq(0))
+
+ # Trigger the schedule and wait to see that it actually ran
+ Temporal.trigger_schedule(namespace, schedule_id, overlap_policy: :buffer_one)
+
+ Timeout.timeout(10) do
+ loop do
+ describe_response = Temporal.describe_schedule(namespace, schedule_id)
+
+ break if describe_response.info && describe_response.info.recent_actions.size >= 1
+
+ sleep(0.5)
+ end
+ end
+
+ expect(describe_response.info.recent_actions.size).to(eq(1))
+ end
+end
diff --git a/examples/spec/integration/update_schedule_spec.rb b/examples/spec/integration/update_schedule_spec.rb
new file mode 100644
index 00000000..5623894d
--- /dev/null
+++ b/examples/spec/integration/update_schedule_spec.rb
@@ -0,0 +1,103 @@
+require "temporal/errors"
+require "temporal/schedule/schedule"
+require "temporal/schedule/schedule_spec"
+require "temporal/schedule/schedule_policies"
+require "temporal/schedule/schedule_state"
+require "temporal/schedule/start_workflow_action"
+
+describe "Temporal.update_schedule", :integration do
+ let(:example_schedule) do
+ Temporal::Schedule::Schedule.new(
+ spec: Temporal::Schedule::ScheduleSpec.new(
+ cron_expressions: ["@hourly"],
+ jitter: 30,
+ # Set an end time so that the test schedule doesn't run forever
+ end_time: Time.now + 600
+ ),
+ action: Temporal::Schedule::StartWorkflowAction.new(
+ "HelloWorldWorkflow",
+ "Test",
+ options: {
+ task_queue: integration_spec_task_queue
+ }
+ ),
+ policies: Temporal::Schedule::SchedulePolicies.new(
+ overlap_policy: :buffer_one
+ ),
+ state: Temporal::Schedule::ScheduleState.new(
+ notes: "Created by integration test"
+ )
+ )
+ end
+
+ let(:updated_schedule) do
+ Temporal::Schedule::Schedule.new(
+ spec: Temporal::Schedule::ScheduleSpec.new(
+ cron_expressions: ["@hourly"],
+ jitter: 500,
+ # Set an end time so that the test schedule doesn't run forever
+ end_time: Time.now + 600
+ ),
+ action: Temporal::Schedule::StartWorkflowAction.new(
+ "HelloWorldWorkflow",
+ "UpdatedInput",
+ options: {
+ task_queue: integration_spec_task_queue
+ }
+ ),
+ policies: Temporal::Schedule::SchedulePolicies.new(
+ overlap_policy: :buffer_all
+ ),
+ state: Temporal::Schedule::ScheduleState.new(
+ notes: "Updated by integration test"
+ )
+ )
+ end
+
+ it "can update schedules" do
+ namespace = integration_spec_namespace
+ schedule_id = SecureRandom.uuid
+
+ Temporal.create_schedule(namespace, schedule_id, example_schedule)
+
+ describe_response = Temporal.describe_schedule(namespace, schedule_id)
+ expect(describe_response.schedule.spec.jitter.seconds).to(eq(30))
+ expect(describe_response.schedule.policies.overlap_policy).to(eq(:SCHEDULE_OVERLAP_POLICY_BUFFER_ONE))
+ expect(describe_response.schedule.action.start_workflow.workflow_type.name).to(eq("HelloWorldWorkflow"))
+ expect(describe_response.schedule.state.notes).to(eq("Created by integration test"))
+
+ Temporal.update_schedule(namespace, schedule_id, updated_schedule)
+ updated_describe = Temporal.describe_schedule(namespace, schedule_id)
+ expect(updated_describe.schedule.spec.jitter.seconds).to(eq(500))
+ expect(updated_describe.schedule.policies.overlap_policy).to(eq(:SCHEDULE_OVERLAP_POLICY_BUFFER_ALL))
+ expect(updated_describe.schedule.state.notes).to(eq("Updated by integration test"))
+ end
+
+ it "does not update if conflict token doesnt match" do
+ namespace = integration_spec_namespace
+ schedule_id = SecureRandom.uuid
+
+ initial_response = Temporal.create_schedule(namespace, schedule_id, example_schedule)
+
+ # Update the schedule but pass the incorrect token
+ Temporal.update_schedule(namespace, schedule_id, updated_schedule, conflict_token: "invalid token")
+
+ # The schedule should not have been updated (we don't get an error message from the server in this case)
+ describe_response = Temporal.describe_schedule(namespace, schedule_id)
+ expect(describe_response.schedule.spec.jitter.seconds).to(eq(30))
+
+ # If we pass the right conflict token the update should be applied
+ Temporal.update_schedule(namespace, schedule_id, updated_schedule, conflict_token: initial_response.conflict_token)
+ updated_describe = Temporal.describe_schedule(namespace, schedule_id)
+ expect(updated_describe.schedule.spec.jitter.seconds).to(eq(500))
+ end
+
+ it "raises a NotFoundFailure if a schedule doesn't exist" do
+ namespace = integration_spec_namespace
+
+ expect do
+ Temporal.update_schedule(namespace, "some-invalid-schedule-id", updated_schedule)
+ end
+ .to(raise_error(Temporal::NotFoundFailure))
+ end
+end
diff --git a/examples/spec/integration/upsert_search_attributes_spec.rb b/examples/spec/integration/upsert_search_attributes_spec.rb
new file mode 100644
index 00000000..99c20f9f
--- /dev/null
+++ b/examples/spec/integration/upsert_search_attributes_spec.rb
@@ -0,0 +1,51 @@
+require 'workflows/upsert_search_attributes_workflow'
+require 'time'
+
+describe 'Temporal::Workflow::Context.upsert_search_attributes', :integration do
+ it 'can upsert a search attribute and then retrieve it' do
+ workflow_id = 'upsert_search_attributes_test_wf-' + SecureRandom.uuid
+ expected_binary_checksum = `git show HEAD -s --format=%H`.strip
+
+ expected_added_attributes = {
+ 'CustomStringField' => 'moo',
+ 'CustomBoolField' => true,
+ 'CustomDoubleField' => 3.14,
+ 'CustomIntField' => 0,
+ 'CustomDatetimeField' => Time.now.utc.iso8601,
+ }
+
+ run_id = Temporal.start_workflow(
+ UpsertSearchAttributesWorkflow,
+ string_value: expected_added_attributes['CustomStringField'],
+ bool_value: expected_added_attributes['CustomBoolField'],
+ float_value: expected_added_attributes['CustomDoubleField'],
+ int_value: expected_added_attributes['CustomIntField'],
+ time_value: expected_added_attributes['CustomDatetimeField'],
+ options: {
+ workflow_id: workflow_id,
+ },
+ )
+
+ added_attributes = Temporal.await_workflow_result(
+ UpsertSearchAttributesWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+ expect(added_attributes).to eq(expected_added_attributes)
+
+ # These attributes are set for the worker in bin/worker
+ expected_attributes = {
+ # Contains a list of all binary checksums seen for this workflow execution
+ 'BinaryChecksums' => [expected_binary_checksum]
+ }.merge(expected_added_attributes)
+
+ execution_info = Temporal.fetch_workflow_execution_info(
+ integration_spec_namespace,
+ workflow_id,
+ nil
+ )
+ # Temporal might add new built-in search attributes, so just assert that
+ # the expected attributes are a subset of the actual attributes:
+ expect(execution_info.search_attributes).to be >= expected_attributes
+ end
+end
diff --git a/examples/spec/integration/wait_for_external_signal_workflow_spec.rb b/examples/spec/integration/wait_for_external_signal_workflow_spec.rb
new file mode 100644
index 00000000..35c1fd36
--- /dev/null
+++ b/examples/spec/integration/wait_for_external_signal_workflow_spec.rb
@@ -0,0 +1,82 @@
+require 'workflows/wait_for_external_signal_workflow'
+require 'workflows/send_signal_to_external_workflow'
+
+describe WaitForExternalSignalWorkflow do
+ let(:signal_name) { "signal_name" }
+ let(:receiver_workflow_id) { SecureRandom.uuid }
+ let(:sender_workflow_id) { SecureRandom.uuid }
+
+ context 'when the workflows succeed then' do
+ it 'receives signal from an external workflow only once' do
+ run_id = Temporal.start_workflow(
+ WaitForExternalSignalWorkflow,
+ signal_name,
+ options: {workflow_id: receiver_workflow_id}
+ )
+
+ Temporal.start_workflow(
+ SendSignalToExternalWorkflow,
+ signal_name,
+ receiver_workflow_id
+ )
+
+ result = Temporal.await_workflow_result(
+ WaitForExternalSignalWorkflow,
+ workflow_id: receiver_workflow_id,
+ run_id: run_id,
+ )
+
+ expect(result).to eq(
+ {
+ received: {
+ signal_name => ["arg1", "arg2"]
+ },
+ counts: {
+ signal_name => 1
+ }
+ }
+ )
+ end
+
+ it 'returns :success to the sending workflow' do
+ Temporal.start_workflow(
+ WaitForExternalSignalWorkflow,
+ signal_name,
+ options: {workflow_id: receiver_workflow_id}
+ )
+
+ run_id = Temporal.start_workflow(
+ SendSignalToExternalWorkflow,
+ signal_name,
+ receiver_workflow_id,
+ options: {workflow_id: sender_workflow_id}
+ )
+
+ result = Temporal.await_workflow_result(
+ SendSignalToExternalWorkflow,
+ workflow_id: sender_workflow_id,
+ run_id: run_id,
+ )
+
+ expect(result).to eq(:success)
+ end
+ end
+
+ context 'when the workflows fail' do
+ it 'correctly handles failure to deliver' do
+ run_id = Temporal.start_workflow(
+ SendSignalToExternalWorkflow,
+ signal_name,
+ receiver_workflow_id,
+ options: {workflow_id: sender_workflow_id})
+
+ result = Temporal.await_workflow_result(
+ SendSignalToExternalWorkflow,
+ workflow_id: sender_workflow_id,
+ run_id: run_id,
+ )
+
+ expect(result).to eq(:failed)
+ end
+ end
+end
diff --git a/examples/spec/integration/wait_for_workflow_spec.rb b/examples/spec/integration/wait_for_workflow_spec.rb
new file mode 100644
index 00000000..d5feeee6
--- /dev/null
+++ b/examples/spec/integration/wait_for_workflow_spec.rb
@@ -0,0 +1,28 @@
+require 'workflows/wait_for_workflow'
+
+describe WaitForWorkflow do
+
+ it 'signals at workflow start time' do
+ workflow_id = SecureRandom.uuid
+ run_id = Temporal.start_workflow(
+ WaitForWorkflow,
+ 10, # number of echo activities to run
+ 2, # max activity parallelism
+ 'signal_name',
+ options: { workflow_id: workflow_id }
+ )
+
+ Temporal.signal_workflow(WaitForWorkflow, 'signal_name', workflow_id, run_id)
+
+ result = Temporal.await_workflow_result(
+ WaitForWorkflow,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ )
+
+ expect(result.length).to eq(3)
+ expect(result[:signal]).to eq(true)
+ expect(result[:timer]).to eq(true)
+ expect(result[:activity]).to eq(true)
+ end
+end
\ No newline at end of file
diff --git a/examples/spec/replay/histories/signal_with_start.binpb b/examples/spec/replay/histories/signal_with_start.binpb
new file mode 100644
index 00000000..7d7bf89c
Binary files /dev/null and b/examples/spec/replay/histories/signal_with_start.binpb differ
diff --git a/examples/spec/replay/histories/signal_with_start.json b/examples/spec/replay/histories/signal_with_start.json
new file mode 100644
index 00000000..fe301a04
--- /dev/null
+++ b/examples/spec/replay/histories/signal_with_start.json
@@ -0,0 +1,361 @@
+{
+ "events": [
+ {
+ "eventId": "1",
+ "eventTime": "2024-05-28T02:46:26.852786129Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_EXECUTION_STARTED",
+ "taskId": "31457280",
+ "workflowExecutionStartedEventAttributes": {
+ "workflowType": {
+ "name": "SignalWithStartWorkflow"
+ },
+ "taskQueue": {
+ "name": "general",
+ "kind": "TASK_QUEUE_KIND_NORMAL"
+ },
+ "input": {
+ "payloads": [
+ {
+ "metadata": {
+ "encoding": "anNvbi9wbGFpbg=="
+ },
+ "data": "ImhpdCI="
+ }
+ ]
+ },
+ "workflowExecutionTimeout": "30s",
+ "workflowRunTimeout": "30s",
+ "workflowTaskTimeout": "10s",
+ "originalExecutionRunId": "c6e8de96-4e18-409d-8e60-38d58f2f11b9",
+ "identity": "4514@DESKTOP-JRJDVRG\n",
+ "firstExecutionRunId": "c6e8de96-4e18-409d-8e60-38d58f2f11b9",
+ "attempt": 1,
+ "workflowExecutionExpirationTime": "2024-05-28T02:46:56.853Z",
+ "firstWorkflowTaskBackoff": "0s",
+ "memo": {
+ },
+ "searchAttributes": {
+ },
+ "header": {
+ }
+ }
+ },
+ {
+ "eventId": "2",
+ "eventTime": "2024-05-28T02:46:26.852896774Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_EXECUTION_SIGNALED",
+ "taskId": "31457281",
+ "workflowExecutionSignaledEventAttributes": {
+ "signalName": "miss",
+ "input": {
+ "payloads": [
+ {
+ "metadata": {
+ "encoding": "anNvbi9wbGFpbg=="
+ },
+ "data": "MQ=="
+ }
+ ]
+ },
+ "identity": "4514@DESKTOP-JRJDVRG\n",
+ "header": {
+ }
+ }
+ },
+ {
+ "eventId": "3",
+ "eventTime": "2024-05-28T02:46:26.852900524Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_TASK_SCHEDULED",
+ "taskId": "31457282",
+ "workflowTaskScheduledEventAttributes": {
+ "taskQueue": {
+ "name": "general",
+ "kind": "TASK_QUEUE_KIND_NORMAL"
+ },
+ "startToCloseTimeout": "10s",
+ "attempt": 1
+ }
+ },
+ {
+ "eventId": "4",
+ "eventTime": "2024-05-28T02:46:26.873042948Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_TASK_STARTED",
+ "taskId": "31457287",
+ "workflowTaskStartedEventAttributes": {
+ "scheduledEventId": "3",
+ "identity": "4417@DESKTOP-JRJDVRG\n",
+ "requestId": "0074c78e-013b-4845-86d5-f83f1f6feb61",
+ "historySizeBytes": "421"
+ }
+ },
+ {
+ "eventId": "5",
+ "eventTime": "2024-05-28T02:46:26.896346434Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_TASK_COMPLETED",
+ "taskId": "31457291",
+ "workflowTaskCompletedEventAttributes": {
+ "scheduledEventId": "3",
+ "startedEventId": "4",
+ "identity": "4417@DESKTOP-JRJDVRG\n",
+ "binaryChecksum": "07d96d88e3691440609a4f5de039969b14a4e6f8",
+ "sdkMetadata": {
+ "langUsedFlags": [
+ 2
+ ]
+ }
+ }
+ },
+ {
+ "eventId": "6",
+ "eventTime": "2024-05-28T02:46:27.869664722Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_EXECUTION_SIGNALED",
+ "taskId": "31457294",
+ "workflowExecutionSignaledEventAttributes": {
+ "signalName": "miss",
+ "input": {
+ "payloads": [
+ {
+ "metadata": {
+ "encoding": "anNvbi9wbGFpbg=="
+ },
+ "data": "Mg=="
+ }
+ ]
+ },
+ "identity": "4514@DESKTOP-JRJDVRG\n"
+ }
+ },
+ {
+ "eventId": "7",
+ "eventTime": "2024-05-28T02:46:27.869669568Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_TASK_SCHEDULED",
+ "taskId": "31457295",
+ "workflowTaskScheduledEventAttributes": {
+ "taskQueue": {
+ "name": "general",
+ "kind": "TASK_QUEUE_KIND_NORMAL"
+ },
+ "startToCloseTimeout": "10s",
+ "attempt": 1
+ }
+ },
+ {
+ "eventId": "8",
+ "eventTime": "2024-05-28T02:46:27.881436143Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_TASK_STARTED",
+ "taskId": "31457298",
+ "workflowTaskStartedEventAttributes": {
+ "scheduledEventId": "7",
+ "identity": "4417@DESKTOP-JRJDVRG\n",
+ "requestId": "b1c0b0cd-cdb1-4bfd-973c-fa43eef6dfb5",
+ "historySizeBytes": "749"
+ }
+ },
+ {
+ "eventId": "9",
+ "eventTime": "2024-05-28T02:46:27.907949953Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_TASK_COMPLETED",
+ "taskId": "31457302",
+ "workflowTaskCompletedEventAttributes": {
+ "scheduledEventId": "7",
+ "startedEventId": "8",
+ "identity": "4417@DESKTOP-JRJDVRG\n",
+ "binaryChecksum": "07d96d88e3691440609a4f5de039969b14a4e6f8"
+ }
+ },
+ {
+ "eventId": "10",
+ "eventTime": "2024-05-28T02:46:28.883578435Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_EXECUTION_SIGNALED",
+ "taskId": "31457304",
+ "workflowExecutionSignaledEventAttributes": {
+ "signalName": "hit",
+ "input": {
+ "payloads": [
+ {
+ "metadata": {
+ "encoding": "anNvbi9wbGFpbg=="
+ },
+ "data": "Mw=="
+ }
+ ]
+ },
+ "identity": "4514@DESKTOP-JRJDVRG\n"
+ }
+ },
+ {
+ "eventId": "11",
+ "eventTime": "2024-05-28T02:46:28.883586706Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_TASK_SCHEDULED",
+ "taskId": "31457305",
+ "workflowTaskScheduledEventAttributes": {
+ "taskQueue": {
+ "name": "general",
+ "kind": "TASK_QUEUE_KIND_NORMAL"
+ },
+ "startToCloseTimeout": "10s",
+ "attempt": 1
+ }
+ },
+ {
+ "eventId": "12",
+ "eventTime": "2024-05-28T02:46:28.899268187Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_TASK_STARTED",
+ "taskId": "31457308",
+ "workflowTaskStartedEventAttributes": {
+ "scheduledEventId": "11",
+ "identity": "4417@DESKTOP-JRJDVRG\n",
+ "requestId": "4840d372-5d7f-46f0-af41-85c9fcac752d",
+ "historySizeBytes": "1071"
+ }
+ },
+ {
+ "eventId": "13",
+ "eventTime": "2024-05-28T02:46:28.925343005Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_TASK_COMPLETED",
+ "taskId": "31457312",
+ "workflowTaskCompletedEventAttributes": {
+ "scheduledEventId": "11",
+ "startedEventId": "12",
+ "identity": "4417@DESKTOP-JRJDVRG\n",
+ "binaryChecksum": "07d96d88e3691440609a4f5de039969b14a4e6f8"
+ }
+ },
+ {
+ "eventId": "14",
+ "eventTime": "2024-05-28T02:46:28.925386163Z",
+ "eventType": "EVENT_TYPE_ACTIVITY_TASK_SCHEDULED",
+ "taskId": "31457313",
+ "activityTaskScheduledEventAttributes": {
+ "activityId": "14",
+ "activityType": {
+ "name": "HelloWorldActivity"
+ },
+ "taskQueue": {
+ "name": "general",
+ "kind": "TASK_QUEUE_KIND_NORMAL"
+ },
+ "header": {
+ "fields": {
+ "test-header": {
+ "metadata": {
+ "encoding": "anNvbi9wbGFpbg=="
+ },
+ "data": "InRlc3Qi"
+ }
+ }
+ },
+ "input": {
+ "payloads": [
+ {
+ "metadata": {
+ "encoding": "anNvbi9wbGFpbg=="
+ },
+ "data": "ImV4cGVjdGVkIHNpZ25hbCI="
+ }
+ ]
+ },
+ "scheduleToCloseTimeout": "30s",
+ "scheduleToStartTimeout": "30s",
+ "startToCloseTimeout": "30s",
+ "heartbeatTimeout": "0s",
+ "workflowTaskCompletedEventId": "13",
+ "retryPolicy": {
+ "initialInterval": "1s",
+ "backoffCoefficient": 2,
+ "maximumInterval": "100s"
+ }
+ }
+ },
+ {
+ "eventId": "15",
+ "eventTime": "2024-05-28T02:46:28.944893259Z",
+ "eventType": "EVENT_TYPE_ACTIVITY_TASK_STARTED",
+ "taskId": "31457317",
+ "activityTaskStartedEventAttributes": {
+ "scheduledEventId": "14",
+ "identity": "4417@DESKTOP-JRJDVRG\n",
+ "requestId": "73f99ef3-e606-421a-ad79-a4e43e41ceba",
+ "attempt": 1
+ }
+ },
+ {
+ "eventId": "16",
+ "eventTime": "2024-05-28T02:46:29.008828231Z",
+ "eventType": "EVENT_TYPE_ACTIVITY_TASK_COMPLETED",
+ "taskId": "31457318",
+ "activityTaskCompletedEventAttributes": {
+ "result": {
+ "payloads": [
+ {
+ "metadata": {
+ "encoding": "anNvbi9wbGFpbg=="
+ },
+ "data": "IkhlbGxvIFdvcmxkLCBleHBlY3RlZCBzaWduYWwi"
+ }
+ ]
+ },
+ "scheduledEventId": "14",
+ "startedEventId": "15",
+ "identity": "4417@DESKTOP-JRJDVRG\n"
+ }
+ },
+ {
+ "eventId": "17",
+ "eventTime": "2024-05-28T02:46:29.008834769Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_TASK_SCHEDULED",
+ "taskId": "31457319",
+ "workflowTaskScheduledEventAttributes": {
+ "taskQueue": {
+ "name": "general",
+ "kind": "TASK_QUEUE_KIND_NORMAL"
+ },
+ "startToCloseTimeout": "10s",
+ "attempt": 1
+ }
+ },
+ {
+ "eventId": "18",
+ "eventTime": "2024-05-28T02:46:29.022515754Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_TASK_STARTED",
+ "taskId": "31457322",
+ "workflowTaskStartedEventAttributes": {
+ "scheduledEventId": "17",
+ "identity": "4417@DESKTOP-JRJDVRG\n",
+ "requestId": "a24ea1bd-8584-41ae-8cc3-0880b8a946d1",
+ "historySizeBytes": "1713"
+ }
+ },
+ {
+ "eventId": "19",
+ "eventTime": "2024-05-28T02:46:29.043259634Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_TASK_COMPLETED",
+ "taskId": "31457326",
+ "workflowTaskCompletedEventAttributes": {
+ "scheduledEventId": "17",
+ "startedEventId": "18",
+ "identity": "4417@DESKTOP-JRJDVRG\n",
+ "binaryChecksum": "07d96d88e3691440609a4f5de039969b14a4e6f8"
+ }
+ },
+ {
+ "eventId": "20",
+ "eventTime": "2024-05-28T02:46:29.043294503Z",
+ "eventType": "EVENT_TYPE_WORKFLOW_EXECUTION_COMPLETED",
+ "taskId": "31457327",
+ "workflowExecutionCompletedEventAttributes": {
+ "result": {
+ "payloads": [
+ {
+ "metadata": {
+ "encoding": "anNvbi9wbGFpbg=="
+ },
+ "data": "Mw=="
+ }
+ ]
+ },
+ "workflowTaskCompletedEventId": "19"
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/examples/spec/replay/signal_with_start_spec.rb b/examples/spec/replay/signal_with_start_spec.rb
new file mode 100644
index 00000000..13c1cb0d
--- /dev/null
+++ b/examples/spec/replay/signal_with_start_spec.rb
@@ -0,0 +1,21 @@
+require "workflows/signal_with_start_workflow"
+require "temporal/testing/replay_tester"
+require "temporal/workflow/history/serialization"
+
+describe "signal with start" do
+ let(:replay_tester) { Temporal::Testing::ReplayTester.new }
+
+ it "two misses, one hit, replay, json" do
+ replay_tester.replay_history(
+ SignalWithStartWorkflow,
+ Temporal::Workflow::History::Serialization.from_json_file("spec/replay/histories/signal_with_start.json")
+ )
+ end
+
+ it "two misses, one hit, replay, binary" do
+ replay_tester.replay_history(
+ SignalWithStartWorkflow,
+ Temporal::Workflow::History::Serialization.from_protobuf_file("spec/replay/histories/signal_with_start.binpb")
+ )
+ end
+end
diff --git a/examples/synchronous-proxy/README.md b/examples/synchronous-proxy/README.md
new file mode 100644
index 00000000..67010958
--- /dev/null
+++ b/examples/synchronous-proxy/README.md
@@ -0,0 +1,77 @@
+# Purpose
+
+This pattern is used when a non-workflow process needs to advance a workflow state
+machine from its initial state to its terminal state. It does this by adding input
+data to the workflow (via Signals) and receiving new information back from the
+workflow (when a secondary proxy workflow exits and returns a value).
+
+The only way to add information to a workflow is via a Signal.
+
+There are two ways to
+get information out of a workflow. One, the workflow has a Query handler and can respond
+to queries. However, this is limited in that Queries may not modify the state of the
+workflow itself. Two, the workflow can exit and return a result to its caller. This
+second approach is leveraged by the pattern to get information back from the primary
+workflow. This information could be used to determine branching behavior for the
+non-workflow caller.
+
+The flow of calls is outlined in the diagram below.
+
+![Flow Diagram](flow.png)
+
+# Explanation
+
+The primary use-case for this pattern is for a non-workflow process to *send and receive* data
+to and from a workflow. Note that a Temporal client may send a signal to a workflow but the
+information transfer is one-way (i.e. fire and forget). There is no mechanism for a workflow
+to send a signal to a non-workflow. A keen observer would note that a Query can be used to
+ask for information; however, a Query is supposed to be idempotent and *should not cause any
+state change* in the workflow itself. Also, Queries imply polling for a result which is slow
+and inefficient. Therefore, it is not a mechanism for sending new information
+into a workflow and receiving a response.
+
+So, the non-workflow process can communicate to a workflow by:
+
+a) Starting that workflow, and
+
+b) Communicating with the workflow by creating proxy workflows to signal the main workflow and
+then block for a response. When these proxy workflows exit, they can return the response to the
+caller.
+
+In the real world, this pattern could be utilized for managing an interaction via a series of
+web pages. Imagine that a user lands on a home page and clicks a link to apply for a library
+card. The link hits the web application's controller and can now start the
+`ApplyForLibraryCardWorkflow`. The workflow ID could be returned back in a response to the caller
+as a session value, for example.
+
+On the next page, the user can fill out the application for the library card by providing their
+name, address, and phone number. Upon submission of this form (via POST), the web application
+controller can 1) look up the associated workflow from the session, and 2) create the
+`SubmitPersonalDetailsWorkflow` workflow and pass in the form data. This workflow packages up
+the data and signals it to the `ApplyForLibraryCardWorkflow` and waits for a response via another
+signal. The main workflow applies the appropriate business logic to the payload and advances its
+state. It then signals back to the proxy workflow the result of its work and then blocks to
+await new data.
+
+Depending on the response from the `ApplyForLibraryCardWorkflow`, the controller can render a page
+to continue the application or ask for the user to correct some bad input.
+
+Continue and repeat this action via the web application controller(s) as it moves the user
+through the entire library card application journey. By their nature, web applications are stateless
+and asynchronous, so the state and behavior are encapsulated by the workflow and its associated
+activity outcomes. The only state outside of the workflow that the web application cares about is the
+session information so it can match the user back to the correct workflow.
+
+# Execution
+
+Open two shells / terminal windows. In one, execute:
+```shell
+ruby worker/worker.rb
+```
+In the second, execute:
+```shell
+ruby ui/main.rb
+```
+In the shell running `ui` it will ask a series of questions. Answer the questions and the
+program will send the appropriate signals around to complete the process. Upon completion it
+prints a success message.
diff --git a/examples/synchronous-proxy/activities.rb b/examples/synchronous-proxy/activities.rb
new file mode 100644
index 00000000..766a6bff
--- /dev/null
+++ b/examples/synchronous-proxy/activities.rb
@@ -0,0 +1,57 @@
+module SynchronousProxy
+ class RegisterEmailActivity < Temporal::Activity
+ def execute(email)
+ logger.info "activity: registered email #{email}"
+ nil
+ end
+ end
+
+ class ValidateSizeActivity < Temporal::Activity
+ InvalidSize = Class.new(StandardError)
+
+ retry_policy(
+ interval: 1,
+ backoff: 1,
+ max_attempts: 3,
+ non_retriable_errors: [InvalidSize])
+
+ def execute(size)
+ logger.info "activity: validate size #{size}"
+ return nil if TShirtSizes.include?(size)
+
+ raise InvalidSize.new("#{size} is not a valid size choice.")
+ end
+ end
+
+ class ValidateColorActivity < Temporal::Activity
+ InvalidColor = Class.new(StandardError)
+
+ retry_policy(
+ interval: 1,
+ backoff: 1,
+ max_attempts: 3,
+ non_retriable_errors: [InvalidColor])
+
+ def execute(color)
+ logger.info "activity: validate color #{color}"
+ return nil if TShirtColors.include?(color)
+
+ raise InvalidColor.new("#{color} is not a valid color choice.")
+ end
+ end
+
+ class ScheduleDeliveryActivity < Temporal::Activity
+ def execute(order)
+ delivery_date = Time.now + (2 * 60 * 60 * 24)
+ logger.info "activity: scheduled delivery for order #{order} at #{delivery_date}"
+ delivery_date
+ end
+ end
+
+ class SendDeliveryEmailActivity < Temporal::Activity
+ def execute(order, order_id, delivery_date)
+ logger.info "email to: #{order.email}, order: #{order}, scheduled delivery: #{delivery_date}"
+ nil
+ end
+ end
+end
diff --git a/examples/synchronous-proxy/configuration.rb b/examples/synchronous-proxy/configuration.rb
new file mode 100644
index 00000000..09f179b9
--- /dev/null
+++ b/examples/synchronous-proxy/configuration.rb
@@ -0,0 +1,17 @@
+require 'bundler'
+Bundler.require :default
+
+require 'temporal'
+
+Temporal.configure do |config|
+ config.host = 'localhost'
+ config.port = 7233
+ config.namespace = 'ruby-samples'
+ config.task_queue = 'ui-driven'
+end
+
+begin
+ Temporal.register_namespace('ruby-samples', 'A safe space for playing with Temporal Ruby')
+rescue Temporal::NamespaceAlreadyExistsFailure
+ nil # service was already registered
+end
diff --git a/examples/synchronous-proxy/flow.png b/examples/synchronous-proxy/flow.png
new file mode 100644
index 00000000..90b9b0f4
Binary files /dev/null and b/examples/synchronous-proxy/flow.png differ
diff --git a/examples/synchronous-proxy/proxy/communications.rb b/examples/synchronous-proxy/proxy/communications.rb
new file mode 100644
index 00000000..63892f62
--- /dev/null
+++ b/examples/synchronous-proxy/proxy/communications.rb
@@ -0,0 +1,112 @@
+module SynchronousProxy
+ module Proxy
+ # We support talking between two workflows using these helper methods. Each workflow
+ # that wants to communicate will include this module. Unlike the Go examples which use
+ # channels, the Ruby support is via #on_signal (to receive), Temporal.signal_workflow
+ # (to send), and #wait_for (to block and wait for incoming signal).
+ #
+ # The basic trick is that we register a single #on_signal signal handler per workflow
+ # via the #setup_signal_handler method. Each incoming signal is parsed to determine if
+ # it's a request or response and then the appropriate ivar is set. After the signal handler
+ # runs, this client executes the block attached to the #wait_for method to see if it
+ # returns true. If the block evaluates that it has received a value into the ivar, it
+ # returns true and unblocks.
+ #
+ module Communications
+ REQUEST_SIGNAL_NAME = "proxy-request-signal".freeze
+ RESPONSE_SIGNAL_NAME = "proxy-response-signal".freeze
+
+ SignalDetails = Struct.new(
+ :name, :key, :value, :error, :calling_workflow_id,
+ keyword_init: true
+ ) do
+ def error?
+ key == "error"
+ end
+
+ def to_input
+ [calling_workflow_id, name, key, value]
+ end
+
+ def self.from_input(input)
+ new(name: input[1], key: input[2], value: input[3], calling_workflow_id: input[0])
+ end
+ end
+
+ def setup_signal_handler
+ w_id = workflow.metadata.id
+ logger.info("#{self.class.name}#setup_signal_handler, Setup signal handler for workflow #{w_id}")
+
+ workflow.on_signal do |signal, input|
+ logger.info("#{self.class.name}#setup_signal_handler, Received signal for workflow #{w_id}, signal #{signal}, input #{input.inspect}")
+ details = SignalDetails.from_input(input)
+
+ case signal
+ when REQUEST_SIGNAL_NAME
+ @request_signal = details
+
+ when RESPONSE_SIGNAL_NAME
+ @response_signal = details
+
+ else
+ logger.warn "#{self.class.name}#setup_signal_handler, Unknown signal received"
+ end
+ end
+ end
+
+ def wait_for_response
+ # #workflow is defined as part of the Temporal::Workflow class and is therefore available to
+ # any methods inside the class plus methods that are included from a Module like this one
+ workflow.wait_for { !!@response_signal }
+ end
+
+ def wait_for_request
+ workflow.wait_for { !!@request_signal }
+ end
+
+ def send_error_response(target_workflow_id, err)
+ w_id = workflow.metadata.id
+
+ logger.info("#{self.class.name}#send_error_response, Sending error response from #{w_id} to #{target_workflow_id}")
+ logger.info("#{self.class.name}#send_error_response, err is #{err.inspect}")
+ details = SignalDetails.new(key: "error", value: err, calling_workflow_id: w_id)
+ workflow.signal_external_workflow(workflow, RESPONSE_SIGNAL_NAME, target_workflow_id, "", details.to_input)
+ nil
+ end
+
+ def send_response(target_workflow_id, key, value)
+ w_id = workflow.metadata.id
+
+ logger.info("#{self.class.name}#send_response, Sending response from #{w_id} to #{target_workflow_id}")
+ details = SignalDetails.new(key: key, value: value, calling_workflow_id: w_id)
+ workflow.signal_external_workflow(workflow, RESPONSE_SIGNAL_NAME, target_workflow_id, "", details.to_input)
+ nil
+ end
+
+ def send_request(target_workflow_id, key, value)
+ w_id = workflow.metadata.id
+
+ logger.info("#{self.class.name}#send_request, Sending request from #{w_id} to #{target_workflow_id}, key #{key}, value #{value}, calling workflow #{w_id}")
+ details = SignalDetails.new(key: key, value: value, calling_workflow_id: w_id)
+ workflow.signal_external_workflow(workflow, REQUEST_SIGNAL_NAME, target_workflow_id, "", details.to_input)
+ nil
+ end
+
+ def receive_response(description="unknown")
+ @response_signal = nil
+ w_id = workflow.metadata.id
+ Temporal.logger.info("#{self.class.name}#receive_response, Waiting for response on [#{description}] in workflow #{w_id}")
+ wait_for_response
+ @response_signal
+ end
+
+ def receive_request(description="unknown")
+ @request_signal = nil
+ w_id = workflow.metadata.id
+ Temporal.logger.info("#{self.class.name}#receive_request, Waiting for request on [#{description}] in workflow #{w_id}")
+ wait_for_request
+ @request_signal
+ end
+ end
+ end
+end
diff --git a/examples/synchronous-proxy/ui/main.rb b/examples/synchronous-proxy/ui/main.rb
new file mode 100644
index 00000000..d7248006
--- /dev/null
+++ b/examples/synchronous-proxy/ui/main.rb
@@ -0,0 +1,68 @@
+require_relative "../configuration"
+require_relative "../workflows"
+
+module SynchronousProxy
+ module UI
+ class Main
+ def run
+ random_id = rand(999_999_999)
+ sequence_no = 0
+ status = create_order(random_id, sequence_no)
+
+ sequence_no += 1
+ email = prompt_and_read_input("Please enter you email address:")
+ status = update_order(random_id: random_id, sequence_no: sequence_no, order_id: status.order_id, stage: SynchronousProxy::RegisterStage, value: email)
+ puts "status #{status.inspect}"
+
+ sequence_no += 1
+ begin
+ size = prompt_and_read_input("Please enter your requested size:")
+ status = update_order(random_id: random_id, sequence_no: sequence_no, order_id: status.order_id, stage: SynchronousProxy::SizeStage, value: size)
+ puts "status #{status.inspect}"
+ rescue SynchronousProxy::ValidateSizeActivity::InvalidSize => e
+ STDERR.puts e.message
+ retry
+ end
+
+ sequence_no += 1
+ begin
+ color = prompt_and_read_input("Please enter your required tshirt color:")
+ status = update_order(random_id: random_id, sequence_no: sequence_no, order_id: status.order_id, stage: SynchronousProxy::ColorStage, value: color)
+ puts "status #{status.inspect}"
+ rescue SynchronousProxy::ValidateColorActivity::InvalidColor => e
+ STDERR.puts e.message
+ retry
+ end
+
+ puts "Thanks for your order!"
+ puts "You will receive an email with shipping details shortly"
+ puts "Exiting at #{Time.now}"
+ end
+
+ def create_order(random_id, sequence_no)
+ w_id = "new-tshirt-order-#{random_id}-#{sequence_no}"
+ workflow_options = {workflow_id: w_id}
+ Temporal.start_workflow(SynchronousProxy::OrderWorkflow, options: workflow_options)
+ status = SynchronousProxy::OrderStatus.new
+ status.order_id = w_id
+ status
+ end
+
+ def update_order(random_id:, sequence_no:, order_id:, stage:, value:)
+ w_id = "update_#{stage}_#{random_id}-#{sequence_no}"
+ workflow_options = {workflow_id: w_id}
+ run_id = Temporal.start_workflow(SynchronousProxy::UpdateOrderWorkflow, order_id, stage, value, options: workflow_options)
+ Temporal.await_workflow_result(SynchronousProxy::UpdateOrderWorkflow, workflow_id: w_id, run_id: run_id)
+ end
+
+ def prompt_and_read_input(prompt)
+ print(prompt + " ")
+ gets.chomp
+ end
+ end
+ end
+end
+
+if $0 == __FILE__
+ SynchronousProxy::UI::Main.new.run
+end
diff --git a/examples/synchronous-proxy/worker/worker.rb b/examples/synchronous-proxy/worker/worker.rb
new file mode 100644
index 00000000..6694db37
--- /dev/null
+++ b/examples/synchronous-proxy/worker/worker.rb
@@ -0,0 +1,15 @@
+require_relative "../configuration"
+require_relative "../workflows"
+require_relative "../activities"
+require 'temporal/worker'
+
+worker = Temporal::Worker.new
+worker.register_workflow(SynchronousProxy::OrderWorkflow)
+worker.register_workflow(SynchronousProxy::UpdateOrderWorkflow)
+worker.register_workflow(SynchronousProxy::ShippingWorkflow)
+worker.register_activity(SynchronousProxy::RegisterEmailActivity)
+worker.register_activity(SynchronousProxy::ValidateSizeActivity)
+worker.register_activity(SynchronousProxy::ValidateColorActivity)
+worker.register_activity(SynchronousProxy::ScheduleDeliveryActivity)
+worker.register_activity(SynchronousProxy::SendDeliveryEmailActivity)
+worker.start
diff --git a/examples/synchronous-proxy/workflows.rb b/examples/synchronous-proxy/workflows.rb
new file mode 100644
index 00000000..f122a6e2
--- /dev/null
+++ b/examples/synchronous-proxy/workflows.rb
@@ -0,0 +1,133 @@
+require_relative "proxy/communications"
+require_relative "activities"
+
+module SynchronousProxy
+ RegisterStage = "register".freeze
+ SizeStage = "size".freeze
+ ColorStage = "color".freeze
+ ShippingStage = "shipping".freeze
+
+ TShirtSizes = ["small", "medium", "large"]
+ TShirtColors = ["red", "blue", "black"]
+
+ OrderStatus = Struct.new(:order_id, :stage, keyword_init: true)
+ TShirtOrder = Struct.new(:email, :size, :color) do
+ def to_s
+ "size: #{size}, color: #{color}"
+ end
+ end
+
+ class OrderWorkflow < Temporal::Workflow
+ include Proxy::Communications # defines #receive_request, #receive_response, #send_error_response, #send_request, and #send_response
+
+ timeouts start_to_close: 60
+
+ def execute
+ order = TShirtOrder.new
+ setup_signal_handler
+
+ # Loop until we receive a valid email
+ loop do
+ signal_detail = receive_request("email_payload")
+ source_id, email = signal_detail.calling_workflow_id, signal_detail.value
+ future = RegisterEmailActivity.execute(email)
+
+ future.failed do |exception|
+ send_error_response(source_id, exception)
+ logger.warn "RegisterEmailActivity returned an error, loop back to top"
+ end
+
+ future.done do
+ order.email = email
+ send_response(source_id, SizeStage, "")
+ end
+
+ future.get
+ break unless future.failed?
+ end
+
+ # Loop until we receive a valid size
+ loop do
+ signal_detail = receive_request("size_payload")
+ source_id, size = signal_detail.calling_workflow_id, signal_detail.value
+ future = ValidateSizeActivity.execute(size)
+
+ future.failed do |exception|
+ send_error_response(source_id, exception)
+ logger.warn "ValidateSizeActivity returned an error, loop back to top"
+ end
+
+ future.done do
+ order.size = size
+ logger.info "ValidateSizeActivity succeeded, progress to next stage"
+ send_response(source_id, ColorStage, "")
+ end
+
+ future.get # block waiting for response
+ break unless future.failed?
+ end
+
+ # Loop until we receive a valid color
+ loop do
+ signal_detail = receive_request("color_payload")
+ source_id, color = signal_detail.calling_workflow_id, signal_detail.value
+ future = ValidateColorActivity.execute(color)
+
+ future.failed do |exception|
+ send_error_response(source_id, exception)
+ logger.warn "ValidateColorActivity returned an error, loop back to top"
+ end
+
+ future.done do
+ order.color = color
+ logger.info "ValidateColorActivity succeeded, progress to next stage"
+ send_response(source_id, ShippingStage, "")
+ end
+
+ future.get # block waiting for response
+ break unless future.failed?
+ end
+
+ # #execute_workflow! blocks until child workflow exits with a result
+ workflow.execute_workflow!(SynchronousProxy::ShippingWorkflow, order, workflow.metadata.id)
+ nil
+ end
+ end
+
+ class UpdateOrderWorkflow < Temporal::Workflow
+ include Proxy::Communications
+ timeouts start_to_close: 60
+
+ def execute(order_workflow_id, stage, value)
+ w_id = workflow.metadata.id
+ setup_signal_handler
+ status = OrderStatus.new(order_id: order_workflow_id, stage: stage)
+ signal_workflow_execution_response = send_request(order_workflow_id, stage, value)
+
+ signal_details = receive_response("#{stage}_stage_payload")
+ logger.warn "UpdateOrderWorkflow received signal_details #{signal_details.inspect}, error? #{signal_details.error?}"
+ raise signal_details.value.class, signal_details.value.message if signal_details.error?
+
+ status.stage = signal_details.key # next stage
+ status
+ end
+ end
+
+ class ShippingWorkflow < Temporal::Workflow
+ timeouts run: 60
+
+ def execute(order, order_workflow_id)
+ future = ScheduleDeliveryActivity.execute(order_workflow_id)
+
+ future.failed do |exception|
+ logger.warn "ShippingWorkflow, ScheduleDelivery failed"
+ end
+
+ future.done do |delivery_date|
+ SendDeliveryEmailActivity.execute!(order, order_workflow_id, delivery_date)
+ end
+
+ future.get
+ end
+ end
+end
diff --git a/examples/workflows/calls_delegator_workflow.rb b/examples/workflows/calls_delegator_workflow.rb
new file mode 100644
index 00000000..253e6a5f
--- /dev/null
+++ b/examples/workflows/calls_delegator_workflow.rb
@@ -0,0 +1,21 @@
+require 'activities/delegator_activity'
+
+class CallsDelegatorWorkflow < Temporal::Workflow
+
+ # In-workflow client to remotely invoke activity.
+ def call_executor(executor_class, args)
+ # We want temporal to record the MyExecutor class--e.g. 'Plus','Times'--as the name of the activities,
+ # rather than DelegatorActivity, for better debuggability
+ workflow.execute_activity!(
+ executor_class,
+ args
+ )
+ end
+
+ def execute
+ operands = { a: 5, b: 3 }
+ result_1 = call_executor(Plus, operands)
+ result_2 = call_executor(Times, operands)
+ { sum: result_1, product: result_2 }
+ end
+end
diff --git a/examples/workflows/child_workflow_terminated_workflow.rb b/examples/workflows/child_workflow_terminated_workflow.rb
new file mode 100644
index 00000000..a64516a7
--- /dev/null
+++ b/examples/workflows/child_workflow_terminated_workflow.rb
@@ -0,0 +1,24 @@
+require 'workflows/simple_timer_workflow'
+require 'activities/terminate_workflow_activity'
+
+class ChildWorkflowTerminatedWorkflow < Temporal::Workflow
+ def execute
+ # start a child workflow that executes for 60 seconds, then attempts to terminate that workflow
+ result = SimpleTimerWorkflow.execute(60)
+ child_workflow_execution = result.child_workflow_execution_future.get
+ TerminateWorkflowActivity.execute!(
+ 'ruby-samples',
+ child_workflow_execution.workflow_id,
+ child_workflow_execution.run_id
+ )
+
+ # Give time for termination to propagate
+ workflow.sleep(1)
+
+ # check that the result is now 'failed'
+ {
+ child_workflow_terminated: result.failed?, # terminated is represented as failed? with the Terminated Error
+ error: result.get
+ }
+ end
+end
diff --git a/examples/workflows/child_workflow_timeout_workflow.rb b/examples/workflows/child_workflow_timeout_workflow.rb
new file mode 100644
index 00000000..41a2bde4
--- /dev/null
+++ b/examples/workflows/child_workflow_timeout_workflow.rb
@@ -0,0 +1,15 @@
+require 'workflows/quick_timeout_workflow'
+
+class ChildWorkflowTimeoutWorkflow < Temporal::Workflow
+ def execute
+ # workflow times out before it can finish running; we should be able to detect that with .failed?
+ result = QuickTimeoutWorkflow.execute
+
+ result.get # wait for the workflow to finish so we can detect if it failed or not
+
+ {
+ child_workflow_failed: result.failed?,
+ error: result.get
+ }
+ end
+end
diff --git a/examples/workflows/continue_as_new_workflow.rb b/examples/workflows/continue_as_new_workflow.rb
new file mode 100644
index 00000000..bf97b079
--- /dev/null
+++ b/examples/workflows/continue_as_new_workflow.rb
@@ -0,0 +1,19 @@
+require 'activities/hello_world_activity'
+
+# Demonstrates how to use history_size to determine when to continue as new
+class ContinueAsNewWorkflow < Temporal::Workflow
+ def execute(hello_count, bytes_max, run = 1)
+ while hello_count.positive? && workflow.history_size.bytes < bytes_max
+ HelloWorldActivity.execute!("Alice Long#{'long' * 100}name")
+ hello_count -= 1
+ end
+
+ workflow.logger.info("Workflow history size: #{workflow.history_size}, remaining hellos: #{hello_count}")
+
+ return workflow.continue_as_new(hello_count, bytes_max, run + 1) if hello_count.positive?
+
+ {
+ runs: run
+ }
+ end
+end
diff --git a/examples/workflows/delegator_workflow.rb b/examples/workflows/delegator_workflow.rb
new file mode 100644
index 00000000..4d4c6cb1
--- /dev/null
+++ b/examples/workflows/delegator_workflow.rb
@@ -0,0 +1,33 @@
+# This sample illustrates using a dynamic Activity to delegate to another set of non-activity
+# classes. This is an advanced use case, used, for example, for integrating with an existing framework
+# that doesn't know about temporal.
+# See Temporal::Worker#register_dynamic_activity for more info.
+
+# An example of another non-Activity class hierarchy.
+class MyWorkflowExecutor
+ def do_it(_args)
+ raise NotImplementedError
+ end
+end
+
+class PlusExecutor < MyWorkflowExecutor
+ def do_it(args)
+ args[:a] + args[:b]
+ end
+end
+
+class TimesExecutor < MyWorkflowExecutor
+ def do_it(args)
+ args[:a] * args[:b]
+ end
+end
+
+# Calls into our other class hierarchy.
+class DelegatorWorkflow < Temporal::Workflow
+ def execute(input)
+ executor = Object.const_get(workflow.name).new
+ raise ArgumentError, "Unknown workflow: #{executor.class}" unless executor.is_a?(MyWorkflowExecutor)
+
+ {computation: executor.do_it(input)}
+ end
+end
diff --git a/examples/workflows/handling_structured_error_workflow.rb b/examples/workflows/handling_structured_error_workflow.rb
new file mode 100644
index 00000000..6d50fbc2
--- /dev/null
+++ b/examples/workflows/handling_structured_error_workflow.rb
@@ -0,0 +1,17 @@
+require 'activities/failing_with_structured_error_activity'
+
+class HandlingStructuredErrorWorkflow < Temporal::Workflow
+
+ def execute(foo, bar)
+ begin
+ FailingWithStructuredErrorActivity.execute!(foo, bar)
+ rescue FailingWithStructuredErrorActivity::MyError => e
+ if e.foo == foo && e.bar == bar
+ return 'successfully handled error'
+ else
+ raise "Failure: didn't receive expected error from the activity"
+ end
+ end
+ raise "Failure: didn't receive any error from the activity"
+ end
+end
diff --git a/examples/workflows/invalid_continue_as_new_workflow.rb b/examples/workflows/invalid_continue_as_new_workflow.rb
new file mode 100644
index 00000000..99b52d98
--- /dev/null
+++ b/examples/workflows/invalid_continue_as_new_workflow.rb
@@ -0,0 +1,17 @@
+require 'activities/hello_world_activity'
+
+# If you run this, you'll get a WorkflowAlreadyCompletingError because after the
+# continue_as_new, we try to do something else.
+class InvalidContinueAsNewWorkflow < Temporal::Workflow
+ timeouts execution: 20
+
+ def execute
+ future = HelloWorldActivity.execute('Alice')
+ workflow.sleep(1)
+ workflow.continue_as_new
+ # Doing anything after continue_as_new (or any workflow completion) is illegal
+ future.done do
+ HelloWorldActivity.execute('Bob')
+ end
+ end
+end
diff --git a/examples/workflows/long_workflow.rb b/examples/workflows/long_workflow.rb
index 3682fad9..09138ae1 100644
--- a/examples/workflows/long_workflow.rb
+++ b/examples/workflows/long_workflow.rb
@@ -2,13 +2,13 @@
class LongWorkflow < Temporal::Workflow
def execute(cycles = 10, interval = 1)
- future = LongRunningActivity.execute(cycles, interval)
+ future = LongRunningActivity.execute(cycles, interval, options: { timeouts: { heartbeat: interval * 2 } })
workflow.on_signal do |signal, input|
logger.warn "Signal received", { signal: signal, input: input }
future.cancel
end
- future.wait
+ future.get
end
end
diff --git a/examples/workflows/loop_workflow.rb b/examples/workflows/loop_workflow.rb
index 5b1f5bfd..b99408f4 100644
--- a/examples/workflows/loop_workflow.rb
+++ b/examples/workflows/loop_workflow.rb
@@ -8,6 +8,10 @@ def execute(count)
return workflow.continue_as_new(count - 1)
end
- return count
+ return {
+ count: count,
+ memo: workflow.metadata.memo,
+ headers: workflow.metadata.headers,
+ }
end
end
diff --git a/examples/workflows/metadata_workflow.rb b/examples/workflows/metadata_workflow.rb
new file mode 100644
index 00000000..62f61703
--- /dev/null
+++ b/examples/workflows/metadata_workflow.rb
@@ -0,0 +1,5 @@
+class MetadataWorkflow < Temporal::Workflow
+ def execute
+ workflow.metadata
+ end
+end
diff --git a/examples/workflows/parent_close_workflow.rb b/examples/workflows/parent_close_workflow.rb
new file mode 100644
index 00000000..a711d882
--- /dev/null
+++ b/examples/workflows/parent_close_workflow.rb
@@ -0,0 +1,15 @@
+require 'workflows/slow_child_workflow'
+
+class ParentCloseWorkflow < Temporal::Workflow
+ def execute(child_workflow_id, parent_close_policy)
+ options = {
+ workflow_id: child_workflow_id,
+ parent_close_policy: parent_close_policy,
+ }
+ result = SlowChildWorkflow.execute(1, options: options)
+
+ # waits for the child workflow to start before exiting
+ result.child_workflow_execution_future.get
+ return
+ end
+end
diff --git a/examples/workflows/parent_id_reuse_workflow.rb b/examples/workflows/parent_id_reuse_workflow.rb
new file mode 100644
index 00000000..f5aa9fa3
--- /dev/null
+++ b/examples/workflows/parent_id_reuse_workflow.rb
@@ -0,0 +1,25 @@
+require 'workflows/hello_world_workflow'
+require 'workflows/failing_workflow'
+
+class ParentIdReuseWorkflow < Temporal::Workflow
+ def execute(workflow_id_1, workflow_id_2, fail_first, reuse_policy)
+ execute_child(workflow_id_1, fail_first, reuse_policy)
+ execute_child(workflow_id_2, false, reuse_policy)
+ end
+
+ private
+
+ def execute_child(workflow_id, fail, reuse_policy)
+ options = {
+ workflow_id: workflow_id,
+ workflow_id_reuse_policy: reuse_policy
+ }
+
+ if fail
+ # wait for it, but don't raise when it fails
+ FailingWorkflow.execute(options: options).wait
+ else
+ HelloWorldWorkflow.execute!(options: options)
+ end
+ end
+end
diff --git a/examples/workflows/query_workflow.rb b/examples/workflows/query_workflow.rb
new file mode 100644
index 00000000..4ecc0f9f
--- /dev/null
+++ b/examples/workflows/query_workflow.rb
@@ -0,0 +1,36 @@
+class QueryWorkflow < Temporal::Workflow
+ attr_reader :state, :signal_count, :last_signal_received
+
+ def execute
+ @state = "started"
+ @signal_count = 0
+ @last_signal_received = nil
+
+ workflow.on_query("state") { |*args| apply_transforms(state, args) }
+ workflow.on_query("signal_count") { signal_count }
+
+ workflow.on_signal do |signal|
+ @signal_count += 1
+ @last_signal_received = signal
+ end
+
+ workflow.wait_until { last_signal_received == "finish" }
+ @state = "finished"
+
+ {
+ signal_count: signal_count,
+ last_signal_received: last_signal_received,
+ final_state: state
+ }
+ end
+
+ private
+
+ def apply_transforms(value, transforms)
+ return value if value.nil? || transforms.empty?
+ transforms.inject(value) do |memo, input|
+ next memo unless memo.respond_to?(input)
+ memo.public_send(input)
+ end
+ end
+end
diff --git a/examples/workflows/schedule_child_workflow.rb b/examples/workflows/schedule_child_workflow.rb
new file mode 100644
index 00000000..da4fa3c6
--- /dev/null
+++ b/examples/workflows/schedule_child_workflow.rb
@@ -0,0 +1,6 @@
+class ScheduleChildWorkflow < Temporal::Workflow
+ def execute(child_workflow_id, cron_schedule)
+ HelloWorldWorkflow.schedule(cron_schedule, options: { workflow_id: child_workflow_id })
+ workflow.sleep(1)
+ end
+end
diff --git a/examples/workflows/send_signal_to_external_workflow.rb b/examples/workflows/send_signal_to_external_workflow.rb
new file mode 100644
index 00000000..c4d560e4
--- /dev/null
+++ b/examples/workflows/send_signal_to_external_workflow.rb
@@ -0,0 +1,17 @@
+# Sends +signal_name+ to the +target_workflow+ from within a workflow.
+# This is different than using the Client#send_signal method which is
+# for signaling a workflow *from outside* any workflow.
+#
+# Returns :success or :failed
+#
+class SendSignalToExternalWorkflow < Temporal::Workflow
+ def execute(signal_name, target_workflow)
+ logger.info("Send a signal to an external workflow")
+ future = workflow.signal_external_workflow(WaitForExternalSignalWorkflow, signal_name, target_workflow, nil, ["arg1", "arg2"])
+ @status = nil
+ future.done { @status = :success }
+ future.failed { @status = :failed }
+ future.get
+ @status
+ end
+end
diff --git a/examples/workflows/signal_with_start_workflow.rb b/examples/workflows/signal_with_start_workflow.rb
new file mode 100644
index 00000000..cfee2bed
--- /dev/null
+++ b/examples/workflows/signal_with_start_workflow.rb
@@ -0,0 +1,26 @@
+require "activities/hello_world_activity"
+
+class SignalWithStartWorkflow < Temporal::Workflow
+
+ def execute(expected_signal)
+ initial_value = "no signal received"
+ received = initial_value
+
+ workflow.on_signal do |signal, input|
+ if signal == expected_signal
+ workflow.logger.info("Accepting expected signal #{signal}: #{input}")
+ HelloWorldActivity.execute!("expected signal")
+ received = input
+ else
+ workflow.logger.info("Ignoring unexpected signal #{signal}: #{input}")
+ end
+ end
+
+ # Wait for the activity in signal callbacks to complete. The workflow will
+ # not automatically wait for any blocking calls made in callbacks to complete
+ # before returning.
+ workflow.logger.info("Waiting for expected signal #{expected_signal}")
+ workflow.wait_until { received != initial_value }
+ received
+ end
+end
diff --git a/examples/workflows/signal_workflow.rb b/examples/workflows/signal_workflow.rb
new file mode 100644
index 00000000..d665533d
--- /dev/null
+++ b/examples/workflows/signal_workflow.rb
@@ -0,0 +1,12 @@
+class SignalWorkflow < Temporal::Workflow
+ def execute(sleep_for)
+ score = 0
+ workflow.on_signal('score') do |signal_value|
+ score += signal_value
+ end
+
+ workflow.sleep(sleep_for)
+
+ score
+ end
+end
diff --git a/examples/workflows/slow_child_workflow.rb b/examples/workflows/slow_child_workflow.rb
new file mode 100644
index 00000000..332b9a33
--- /dev/null
+++ b/examples/workflows/slow_child_workflow.rb
@@ -0,0 +1,9 @@
+class SlowChildWorkflow < Temporal::Workflow
+ def execute(delay)
+ if delay.positive?
+ workflow.sleep(delay)
+ end
+
+ return { parent_workflow_id: workflow.metadata.parent_id }
+ end
+end
diff --git a/examples/workflows/start_child_workflow_workflow.rb b/examples/workflows/start_child_workflow_workflow.rb
new file mode 100644
index 00000000..de6bb9b0
--- /dev/null
+++ b/examples/workflows/start_child_workflow_workflow.rb
@@ -0,0 +1,17 @@
+require 'workflows/slow_child_workflow'
+
+class StartChildWorkflowWorkflow < Temporal::Workflow
+ def execute(child_workflow_id)
+ options = {
+ workflow_id: child_workflow_id,
+ parent_close_policy: :abandon,
+ }
+ result = SlowChildWorkflow.execute(1, options: options)
+ child_workflow_execution = result.child_workflow_execution_future.get
+
+ # return back the workflow_id and run_id so we can nicely check if
+ # everything was passed correctly
+ response = Struct.new(:workflow_id, :run_id)
+ response.new(child_workflow_execution.workflow_id, child_workflow_execution.run_id)
+ end
+end
diff --git a/examples/workflows/upsert_search_attributes_workflow.rb b/examples/workflows/upsert_search_attributes_workflow.rb
new file mode 100644
index 00000000..4657628f
--- /dev/null
+++ b/examples/workflows/upsert_search_attributes_workflow.rb
@@ -0,0 +1,39 @@
+require 'activities/hello_world_activity'
+class UpsertSearchAttributesWorkflow < Temporal::Workflow
+ # time_value example: use this format: Time.now.utc.strftime("%Y-%m-%dT%H:%M:%SZ")
+ # values comes from keyword args passed to start_workflow
+ def execute(values)
+ # These are included in the default temporal docker setup.
+ # Run tctl admin cluster get-search-attributes to list the options and
+ # See https://docs.temporal.io/docs/tctl/how-to-add-a-custom-search-attribute-to-a-cluster-using-tctl
+ # for instructions on adding them.
+ attributes = {
+ 'CustomStringField' => values[:string_value],
+ 'CustomBoolField' => values[:bool_value],
+ 'CustomDoubleField' => values[:float_value],
+ 'CustomIntField' => values[:int_value],
+ 'CustomDatetimeField' => values[:time_value],
+ }
+ attributes.compact!
+ workflow.upsert_search_attributes(attributes)
+ # .dup because the same backing hash may be used throughout the workflow, causing
+ # the equality check at the end to succeed incorrectly
+ attributes_after_upsert = workflow.search_attributes.dup
+
+ # The following lines are extra complexity to test if upsert_search_attributes is tracked properly in the internal
+ # state machine.
+ future = HelloWorldActivity.execute("Moon")
+
+ name = workflow.side_effect { SecureRandom.uuid }
+ workflow.wait_for_all(future)
+
+ HelloWorldActivity.execute!(name)
+
+ attributes_at_end = workflow.search_attributes
+ if attributes_at_end != attributes_after_upsert
+ raise "Attributes at end #{attributes_at_end} don't match after upsert #{attributes_after_upsert}"
+ end
+
+ attributes_at_end
+ end
+end
diff --git a/examples/workflows/wait_for_external_signal_workflow.rb b/examples/workflows/wait_for_external_signal_workflow.rb
new file mode 100644
index 00000000..69bd8eea
--- /dev/null
+++ b/examples/workflows/wait_for_external_signal_workflow.rb
@@ -0,0 +1,22 @@
+# One workflow sends a signal to another workflow. Can be used to implement
+# the synchronous-proxy pattern (see Go samples)
+#
+class WaitForExternalSignalWorkflow < Temporal::Workflow
+ def execute(expected_signal)
+ signals_received = {}
+ signal_counts = Hash.new { |h,k| h[k] = 0 }
+
+ workflow.on_signal do |signal, input|
+ workflow.logger.info("Received signal name #{signal}, with input #{input.inspect}")
+ signals_received[signal] = input
+ signal_counts[signal] += 1
+ end
+
+ workflow.wait_until do
+ workflow.logger.info("Awaiting #{expected_signal}, signals received so far: #{signals_received}")
+ signals_received.key?(expected_signal)
+ end
+
+ { received: signals_received, counts: signal_counts }
+ end
+end
diff --git a/examples/workflows/wait_for_named_signal_workflow.rb b/examples/workflows/wait_for_named_signal_workflow.rb
new file mode 100644
index 00000000..96f96ece
--- /dev/null
+++ b/examples/workflows/wait_for_named_signal_workflow.rb
@@ -0,0 +1,27 @@
+# Can receive signals to its named signal handler. If a signal doesn't match the
+# named handler's signature, then it matches the catch-all signal handler
+#
+class WaitForNamedSignalWorkflow < Temporal::Workflow
+ def execute(expected_signal)
+ signals_received = {}
+ signal_counts = Hash.new { |h,k| h[k] = 0 }
+
+ # catch-all handler
+ workflow.on_signal do |signal, input|
+ workflow.logger.info("Received signal name as #{signal}, with input #{input.inspect}")
+ signals_received['catch-all'] = input
+ signal_counts['catch-all'] += 1
+ end
+
+ workflow.on_signal('NamedSignal') do |input|
+ workflow.logger.info("Received signal name -NamedSignal-, with input #{input.inspect}")
+ signals_received['NamedSignal'] = input
+ signal_counts['NamedSignal'] += 1
+ end
+
+ timeout_timer = workflow.start_timer(1)
+ workflow.wait_for_any(timeout_timer)
+
+ { received: signals_received, counts: signal_counts }
+ end
+end
diff --git a/examples/workflows/wait_for_workflow.rb b/examples/workflows/wait_for_workflow.rb
new file mode 100644
index 00000000..6b26c28d
--- /dev/null
+++ b/examples/workflows/wait_for_workflow.rb
@@ -0,0 +1,66 @@
+require 'activities/echo_activity'
+require 'activities/long_running_activity'
+
+# This example workflow exercises all three conditions that can change state that is being
+# awaited upon: activity completion, sleep completion, signal received.
+class WaitForWorkflow < Temporal::Workflow
+ def execute(total_echos, max_echos_at_once, expected_signal)
+ signals_received = {}
+
+ workflow.on_signal do |signal, input|
+ signals_received[signal] = input
+ end
+
+ workflow.wait_until do
+ workflow.logger.info("Awaiting #{expected_signal}, signals received so far: #{signals_received}")
+ signals_received.key?(expected_signal)
+ end
+
+ # Run an activity but with a max time limit by starting a timer. This activity
+ # will not complete before the timer, which may result in a failed activity task after the
+ # workflow is completed.
+ long_running_future = LongRunningActivity.execute(15, 0.1)
+ timeout_timer = workflow.start_timer(1)
+ workflow.wait_for_any(timeout_timer, long_running_future)
+
+ timer_beat_activity = timeout_timer.finished? && !long_running_future.finished?
+
+ # This should not wait further. The first future has already finished, and therefore
+ # the second one should not be awaited upon.
+ long_timeout_timer = workflow.start_timer(15)
+ workflow.wait_for_any(timeout_timer, long_timeout_timer)
+ raise 'The workflow should not have waited for this timer to complete' if long_timeout_timer.finished?
+
+ activity_futures = {}
+ echos_completed = 0
+
+ total_echos.times do |i|
+ workflow.wait_until do
+ workflow.logger.info("Activities in flight #{activity_futures.length}")
+ # Pause workflow until the number of active activity futures is less than 2. This
+ # will throttle new activities from being started, guaranteeing that only two of these
+ # activities are running at once.
+ activity_futures.length < max_echos_at_once
+ end
+
+ future = EchoActivity.execute("hi #{i}")
+ activity_futures[i] = future
+
+ future.done do
+ activity_futures.delete(i)
+ echos_completed += 1
+ end
+ end
+
+ workflow.wait_until do
+ workflow.logger.info("Waiting for queue to drain, size: #{activity_futures.length}")
+ activity_futures.empty?
+ end
+
+ {
+ signal: signals_received.key?(expected_signal),
+ timer: timer_beat_activity,
+ activity: echos_completed == total_echos
+ }
+ end
+end
diff --git a/lib/gen/dependencies/gogoproto/gogo_pb.rb b/lib/gen/dependencies/gogoproto/gogo_pb.rb
new file mode 100644
index 00000000..d63bf773
--- /dev/null
+++ b/lib/gen/dependencies/gogoproto/gogo_pb.rb
@@ -0,0 +1,14 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: dependencies/gogoproto/gogo.proto
+
+require 'google/protobuf'
+
+require 'google/protobuf/descriptor_pb'
+
+Google::Protobuf::DescriptorPool.generated_pool.build do
+ add_file("dependencies/gogoproto/gogo.proto", :syntax => :proto2) do
+ end
+end
+
+module Gogoproto
+end
diff --git a/lib/gen/temporal/api/batch/v1/message_pb.rb b/lib/gen/temporal/api/batch/v1/message_pb.rb
new file mode 100644
index 00000000..5f1d88e4
--- /dev/null
+++ b/lib/gen/temporal/api/batch/v1/message_pb.rb
@@ -0,0 +1,50 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: temporal/api/batch/v1/message.proto
+
+require 'google/protobuf'
+
+require 'dependencies/gogoproto/gogo_pb'
+require 'google/protobuf/timestamp_pb'
+require 'temporal/api/common/v1/message_pb'
+require 'temporal/api/enums/v1/batch_operation_pb'
+
+Google::Protobuf::DescriptorPool.generated_pool.build do
+ add_file("temporal/api/batch/v1/message.proto", :syntax => :proto3) do
+ add_message "temporal.api.batch.v1.BatchOperationInfo" do
+ optional :job_id, :string, 1
+ optional :state, :enum, 2, "temporal.api.enums.v1.BatchOperationState"
+ optional :start_time, :message, 3, "google.protobuf.Timestamp"
+ optional :close_time, :message, 4, "google.protobuf.Timestamp"
+ end
+ add_message "temporal.api.batch.v1.BatchOperationTermination" do
+ optional :details, :message, 1, "temporal.api.common.v1.Payloads"
+ optional :identity, :string, 2
+ end
+ add_message "temporal.api.batch.v1.BatchOperationSignal" do
+ optional :signal, :string, 1
+ optional :input, :message, 2, "temporal.api.common.v1.Payloads"
+ optional :header, :message, 3, "temporal.api.common.v1.Header"
+ optional :identity, :string, 4
+ end
+ add_message "temporal.api.batch.v1.BatchOperationCancellation" do
+ optional :identity, :string, 1
+ end
+ add_message "temporal.api.batch.v1.BatchOperationDeletion" do
+ optional :identity, :string, 1
+ end
+ end
+end
+
+module Temporalio
+ module Api
+ module Batch
+ module V1
+ BatchOperationInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.batch.v1.BatchOperationInfo").msgclass
+ BatchOperationTermination = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.batch.v1.BatchOperationTermination").msgclass
+ BatchOperationSignal = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.batch.v1.BatchOperationSignal").msgclass
+ BatchOperationCancellation = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.batch.v1.BatchOperationCancellation").msgclass
+ BatchOperationDeletion = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.batch.v1.BatchOperationDeletion").msgclass
+ end
+ end
+ end
+end
diff --git a/lib/gen/temporal/api/command/v1/message_pb.rb b/lib/gen/temporal/api/command/v1/message_pb.rb
index fa13cc7a..0dd08254 100644
--- a/lib/gen/temporal/api/command/v1/message_pb.rb
+++ b/lib/gen/temporal/api/command/v1/message_pb.rb
@@ -4,17 +4,18 @@
require 'google/protobuf'
require 'google/protobuf/duration_pb'
+require 'dependencies/gogoproto/gogo_pb'
require 'temporal/api/enums/v1/workflow_pb'
require 'temporal/api/enums/v1/command_type_pb'
require 'temporal/api/common/v1/message_pb'
require 'temporal/api/failure/v1/message_pb'
require 'temporal/api/taskqueue/v1/message_pb'
+
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("temporal/api/command/v1/message.proto", :syntax => :proto3) do
add_message "temporal.api.command.v1.ScheduleActivityTaskCommandAttributes" do
optional :activity_id, :string, 1
optional :activity_type, :message, 2, "temporal.api.common.v1.ActivityType"
- optional :namespace, :string, 3
optional :task_queue, :message, 4, "temporal.api.taskqueue.v1.TaskQueue"
optional :header, :message, 5, "temporal.api.common.v1.Header"
optional :input, :message, 6, "temporal.api.common.v1.Payloads"
@@ -23,6 +24,7 @@
optional :start_to_close_timeout, :message, 9, "google.protobuf.Duration"
optional :heartbeat_timeout, :message, 10, "google.protobuf.Duration"
optional :retry_policy, :message, 11, "temporal.api.common.v1.RetryPolicy"
+ optional :request_eager_execution, :bool, 12
end
add_message "temporal.api.command.v1.RequestCancelActivityTaskCommandAttributes" do
optional :scheduled_event_id, :int64, 1
@@ -49,6 +51,7 @@
optional :run_id, :string, 3
optional :control, :string, 4
optional :child_workflow_only, :bool, 5
+ optional :reason, :string, 6
end
add_message "temporal.api.command.v1.SignalExternalWorkflowExecutionCommandAttributes" do
optional :namespace, :string, 1
@@ -57,10 +60,14 @@
optional :input, :message, 4, "temporal.api.common.v1.Payloads"
optional :control, :string, 5
optional :child_workflow_only, :bool, 6
+ optional :header, :message, 7, "temporal.api.common.v1.Header"
end
add_message "temporal.api.command.v1.UpsertWorkflowSearchAttributesCommandAttributes" do
optional :search_attributes, :message, 1, "temporal.api.common.v1.SearchAttributes"
end
+ add_message "temporal.api.command.v1.ModifyWorkflowPropertiesCommandAttributes" do
+ optional :upserted_memo, :message, 1, "temporal.api.common.v1.Memo"
+ end
add_message "temporal.api.command.v1.RecordMarkerCommandAttributes" do
optional :marker_name, :string, 1
map :details, :string, :message, 2, "temporal.api.common.v1.Payloads"
@@ -101,6 +108,9 @@
optional :memo, :message, 15, "temporal.api.common.v1.Memo"
optional :search_attributes, :message, 16, "temporal.api.common.v1.SearchAttributes"
end
+ add_message "temporal.api.command.v1.ProtocolMessageCommandAttributes" do
+ optional :message_id, :string, 1
+ end
add_message "temporal.api.command.v1.Command" do
optional :command_type, :enum, 1, "temporal.api.enums.v1.CommandType"
oneof :attributes do
@@ -117,12 +127,14 @@
optional :start_child_workflow_execution_command_attributes, :message, 12, "temporal.api.command.v1.StartChildWorkflowExecutionCommandAttributes"
optional :signal_external_workflow_execution_command_attributes, :message, 13, "temporal.api.command.v1.SignalExternalWorkflowExecutionCommandAttributes"
optional :upsert_workflow_search_attributes_command_attributes, :message, 14, "temporal.api.command.v1.UpsertWorkflowSearchAttributesCommandAttributes"
+ optional :protocol_message_command_attributes, :message, 15, "temporal.api.command.v1.ProtocolMessageCommandAttributes"
+ optional :modify_workflow_properties_command_attributes, :message, 17, "temporal.api.command.v1.ModifyWorkflowPropertiesCommandAttributes"
end
end
end
end
-module Temporal
+module Temporalio
module Api
module Command
module V1
@@ -136,9 +148,11 @@ module V1
RequestCancelExternalWorkflowExecutionCommandAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.command.v1.RequestCancelExternalWorkflowExecutionCommandAttributes").msgclass
SignalExternalWorkflowExecutionCommandAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.command.v1.SignalExternalWorkflowExecutionCommandAttributes").msgclass
UpsertWorkflowSearchAttributesCommandAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.command.v1.UpsertWorkflowSearchAttributesCommandAttributes").msgclass
+ ModifyWorkflowPropertiesCommandAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.command.v1.ModifyWorkflowPropertiesCommandAttributes").msgclass
RecordMarkerCommandAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.command.v1.RecordMarkerCommandAttributes").msgclass
ContinueAsNewWorkflowExecutionCommandAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.command.v1.ContinueAsNewWorkflowExecutionCommandAttributes").msgclass
StartChildWorkflowExecutionCommandAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.command.v1.StartChildWorkflowExecutionCommandAttributes").msgclass
+ ProtocolMessageCommandAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.command.v1.ProtocolMessageCommandAttributes").msgclass
Command = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.command.v1.Command").msgclass
end
end
diff --git a/lib/gen/temporal/api/common/v1/message_pb.rb b/lib/gen/temporal/api/common/v1/message_pb.rb
index 281878d8..52c50880 100644
--- a/lib/gen/temporal/api/common/v1/message_pb.rb
+++ b/lib/gen/temporal/api/common/v1/message_pb.rb
@@ -4,7 +4,9 @@
require 'google/protobuf'
require 'google/protobuf/duration_pb'
+require 'dependencies/gogoproto/gogo_pb'
require 'temporal/api/enums/v1/common_pb'
+
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("temporal/api/common/v1/message.proto", :syntax => :proto3) do
add_message "temporal.api.common.v1.DataBlob" do
@@ -44,10 +46,20 @@
optional :maximum_attempts, :int32, 4
repeated :non_retryable_error_types, :string, 5
end
+ add_message "temporal.api.common.v1.MeteringMetadata" do
+ optional :nonfirst_local_activity_execution_attempts, :uint32, 13
+ end
+ add_message "temporal.api.common.v1.WorkerVersionStamp" do
+ optional :build_id, :string, 1
+ optional :bundle_id, :string, 2
+ end
+ add_message "temporal.api.common.v1.WorkerVersionCapabilities" do
+ optional :build_id, :string, 1
+ end
end
end
-module Temporal
+module Temporalio
module Api
module Common
module V1
@@ -61,6 +73,9 @@ module V1
WorkflowType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.common.v1.WorkflowType").msgclass
ActivityType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.common.v1.ActivityType").msgclass
RetryPolicy = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.common.v1.RetryPolicy").msgclass
+ MeteringMetadata = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.common.v1.MeteringMetadata").msgclass
+ WorkerVersionStamp = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.common.v1.WorkerVersionStamp").msgclass
+ WorkerVersionCapabilities = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.common.v1.WorkerVersionCapabilities").msgclass
end
end
end
diff --git a/lib/gen/temporal/api/enums/v1/batch_operation_pb.rb b/lib/gen/temporal/api/enums/v1/batch_operation_pb.rb
new file mode 100644
index 00000000..aff7e54f
--- /dev/null
+++ b/lib/gen/temporal/api/enums/v1/batch_operation_pb.rb
@@ -0,0 +1,33 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: temporal/api/enums/v1/batch_operation.proto
+
+require 'google/protobuf'
+
+Google::Protobuf::DescriptorPool.generated_pool.build do
+ add_file("temporal/api/enums/v1/batch_operation.proto", :syntax => :proto3) do
+ add_enum "temporal.api.enums.v1.BatchOperationType" do
+ value :BATCH_OPERATION_TYPE_UNSPECIFIED, 0
+ value :BATCH_OPERATION_TYPE_TERMINATE, 1
+ value :BATCH_OPERATION_TYPE_CANCEL, 2
+ value :BATCH_OPERATION_TYPE_SIGNAL, 3
+ value :BATCH_OPERATION_TYPE_DELETE, 4
+ end
+ add_enum "temporal.api.enums.v1.BatchOperationState" do
+ value :BATCH_OPERATION_STATE_UNSPECIFIED, 0
+ value :BATCH_OPERATION_STATE_RUNNING, 1
+ value :BATCH_OPERATION_STATE_COMPLETED, 2
+ value :BATCH_OPERATION_STATE_FAILED, 3
+ end
+ end
+end
+
+module Temporalio
+ module Api
+ module Enums
+ module V1
+ BatchOperationType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.BatchOperationType").enummodule
+ BatchOperationState = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.BatchOperationState").enummodule
+ end
+ end
+ end
+end
diff --git a/lib/gen/temporal/api/enums/v1/command_type_pb.rb b/lib/gen/temporal/api/enums/v1/command_type_pb.rb
index d77269e0..fc4270ea 100644
--- a/lib/gen/temporal/api/enums/v1/command_type_pb.rb
+++ b/lib/gen/temporal/api/enums/v1/command_type_pb.rb
@@ -20,11 +20,13 @@
value :COMMAND_TYPE_START_CHILD_WORKFLOW_EXECUTION, 11
value :COMMAND_TYPE_SIGNAL_EXTERNAL_WORKFLOW_EXECUTION, 12
value :COMMAND_TYPE_UPSERT_WORKFLOW_SEARCH_ATTRIBUTES, 13
+ value :COMMAND_TYPE_PROTOCOL_MESSAGE, 14
+ value :COMMAND_TYPE_MODIFY_WORKFLOW_PROPERTIES, 16
end
end
end
-module Temporal
+module Temporalio
module Api
module Enums
module V1
diff --git a/lib/gen/temporal/api/enums/v1/common_pb.rb b/lib/gen/temporal/api/enums/v1/common_pb.rb
index 3b1ca3b6..56c53671 100644
--- a/lib/gen/temporal/api/enums/v1/common_pb.rb
+++ b/lib/gen/temporal/api/enums/v1/common_pb.rb
@@ -12,12 +12,13 @@
end
add_enum "temporal.api.enums.v1.IndexedValueType" do
value :INDEXED_VALUE_TYPE_UNSPECIFIED, 0
- value :INDEXED_VALUE_TYPE_STRING, 1
+ value :INDEXED_VALUE_TYPE_TEXT, 1
value :INDEXED_VALUE_TYPE_KEYWORD, 2
value :INDEXED_VALUE_TYPE_INT, 3
value :INDEXED_VALUE_TYPE_DOUBLE, 4
value :INDEXED_VALUE_TYPE_BOOL, 5
value :INDEXED_VALUE_TYPE_DATETIME, 6
+ value :INDEXED_VALUE_TYPE_KEYWORD_LIST, 7
end
add_enum "temporal.api.enums.v1.Severity" do
value :SEVERITY_UNSPECIFIED, 0
@@ -28,7 +29,7 @@
end
end
-module Temporal
+module Temporalio
module Api
module Enums
module V1
diff --git a/lib/gen/temporal/api/enums/v1/event_type_pb.rb b/lib/gen/temporal/api/enums/v1/event_type_pb.rb
index d1fa17b7..b18c13c6 100644
--- a/lib/gen/temporal/api/enums/v1/event_type_pb.rb
+++ b/lib/gen/temporal/api/enums/v1/event_type_pb.rb
@@ -47,11 +47,17 @@
value :EVENT_TYPE_SIGNAL_EXTERNAL_WORKFLOW_EXECUTION_FAILED, 38
value :EVENT_TYPE_EXTERNAL_WORKFLOW_EXECUTION_SIGNALED, 39
value :EVENT_TYPE_UPSERT_WORKFLOW_SEARCH_ATTRIBUTES, 40
+ value :EVENT_TYPE_WORKFLOW_EXECUTION_UPDATE_ACCEPTED, 41
+ value :EVENT_TYPE_WORKFLOW_EXECUTION_UPDATE_REJECTED, 42
+ value :EVENT_TYPE_WORKFLOW_EXECUTION_UPDATE_COMPLETED, 43
+ value :EVENT_TYPE_WORKFLOW_PROPERTIES_MODIFIED_EXTERNALLY, 44
+ value :EVENT_TYPE_ACTIVITY_PROPERTIES_MODIFIED_EXTERNALLY, 45
+ value :EVENT_TYPE_WORKFLOW_PROPERTIES_MODIFIED, 46
end
end
end
-module Temporal
+module Temporalio
module Api
module Enums
module V1
diff --git a/lib/gen/temporal/api/enums/v1/failed_cause_pb.rb b/lib/gen/temporal/api/enums/v1/failed_cause_pb.rb
index f61ea661..4986e3ac 100644
--- a/lib/gen/temporal/api/enums/v1/failed_cause_pb.rb
+++ b/lib/gen/temporal/api/enums/v1/failed_cause_pb.rb
@@ -30,23 +30,42 @@
value :WORKFLOW_TASK_FAILED_CAUSE_BAD_BINARY, 21
value :WORKFLOW_TASK_FAILED_CAUSE_SCHEDULE_ACTIVITY_DUPLICATE_ID, 22
value :WORKFLOW_TASK_FAILED_CAUSE_BAD_SEARCH_ATTRIBUTES, 23
+ value :WORKFLOW_TASK_FAILED_CAUSE_NON_DETERMINISTIC_ERROR, 24
+ value :WORKFLOW_TASK_FAILED_CAUSE_BAD_MODIFY_WORKFLOW_PROPERTIES_ATTRIBUTES, 25
+ value :WORKFLOW_TASK_FAILED_CAUSE_PENDING_CHILD_WORKFLOWS_LIMIT_EXCEEDED, 26
+ value :WORKFLOW_TASK_FAILED_CAUSE_PENDING_ACTIVITIES_LIMIT_EXCEEDED, 27
+ value :WORKFLOW_TASK_FAILED_CAUSE_PENDING_SIGNALS_LIMIT_EXCEEDED, 28
+ value :WORKFLOW_TASK_FAILED_CAUSE_PENDING_REQUEST_CANCEL_LIMIT_EXCEEDED, 29
+ value :WORKFLOW_TASK_FAILED_CAUSE_BAD_UPDATE_WORKFLOW_EXECUTION_MESSAGE, 30
+ value :WORKFLOW_TASK_FAILED_CAUSE_UNHANDLED_UPDATE, 31
end
add_enum "temporal.api.enums.v1.StartChildWorkflowExecutionFailedCause" do
value :START_CHILD_WORKFLOW_EXECUTION_FAILED_CAUSE_UNSPECIFIED, 0
value :START_CHILD_WORKFLOW_EXECUTION_FAILED_CAUSE_WORKFLOW_ALREADY_EXISTS, 1
+ value :START_CHILD_WORKFLOW_EXECUTION_FAILED_CAUSE_NAMESPACE_NOT_FOUND, 2
end
add_enum "temporal.api.enums.v1.CancelExternalWorkflowExecutionFailedCause" do
value :CANCEL_EXTERNAL_WORKFLOW_EXECUTION_FAILED_CAUSE_UNSPECIFIED, 0
value :CANCEL_EXTERNAL_WORKFLOW_EXECUTION_FAILED_CAUSE_EXTERNAL_WORKFLOW_EXECUTION_NOT_FOUND, 1
+ value :CANCEL_EXTERNAL_WORKFLOW_EXECUTION_FAILED_CAUSE_NAMESPACE_NOT_FOUND, 2
end
add_enum "temporal.api.enums.v1.SignalExternalWorkflowExecutionFailedCause" do
value :SIGNAL_EXTERNAL_WORKFLOW_EXECUTION_FAILED_CAUSE_UNSPECIFIED, 0
value :SIGNAL_EXTERNAL_WORKFLOW_EXECUTION_FAILED_CAUSE_EXTERNAL_WORKFLOW_EXECUTION_NOT_FOUND, 1
+ value :SIGNAL_EXTERNAL_WORKFLOW_EXECUTION_FAILED_CAUSE_NAMESPACE_NOT_FOUND, 2
+ value :SIGNAL_EXTERNAL_WORKFLOW_EXECUTION_FAILED_CAUSE_SIGNAL_COUNT_LIMIT_EXCEEDED, 3
+ end
+ add_enum "temporal.api.enums.v1.ResourceExhaustedCause" do
+ value :RESOURCE_EXHAUSTED_CAUSE_UNSPECIFIED, 0
+ value :RESOURCE_EXHAUSTED_CAUSE_RPS_LIMIT, 1
+ value :RESOURCE_EXHAUSTED_CAUSE_CONCURRENT_LIMIT, 2
+ value :RESOURCE_EXHAUSTED_CAUSE_SYSTEM_OVERLOADED, 3
+ value :RESOURCE_EXHAUSTED_CAUSE_PERSISTENCE_LIMIT, 4
end
end
end
-module Temporal
+module Temporalio
module Api
module Enums
module V1
@@ -54,6 +73,7 @@ module V1
StartChildWorkflowExecutionFailedCause = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.StartChildWorkflowExecutionFailedCause").enummodule
CancelExternalWorkflowExecutionFailedCause = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.CancelExternalWorkflowExecutionFailedCause").enummodule
SignalExternalWorkflowExecutionFailedCause = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.SignalExternalWorkflowExecutionFailedCause").enummodule
+ ResourceExhaustedCause = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.ResourceExhaustedCause").enummodule
end
end
end
diff --git a/lib/gen/temporal/api/enums/v1/namespace_pb.rb b/lib/gen/temporal/api/enums/v1/namespace_pb.rb
index 65980db0..d8478407 100644
--- a/lib/gen/temporal/api/enums/v1/namespace_pb.rb
+++ b/lib/gen/temporal/api/enums/v1/namespace_pb.rb
@@ -16,15 +16,21 @@
value :ARCHIVAL_STATE_DISABLED, 1
value :ARCHIVAL_STATE_ENABLED, 2
end
+ add_enum "temporal.api.enums.v1.ReplicationState" do
+ value :REPLICATION_STATE_UNSPECIFIED, 0
+ value :REPLICATION_STATE_NORMAL, 1
+ value :REPLICATION_STATE_HANDOVER, 2
+ end
end
end
-module Temporal
+module Temporalio
module Api
module Enums
module V1
NamespaceState = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.NamespaceState").enummodule
ArchivalState = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.ArchivalState").enummodule
+ ReplicationState = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.ReplicationState").enummodule
end
end
end
diff --git a/lib/gen/temporal/api/enums/v1/query_pb.rb b/lib/gen/temporal/api/enums/v1/query_pb.rb
index d6eaabb2..34e4f770 100644
--- a/lib/gen/temporal/api/enums/v1/query_pb.rb
+++ b/lib/gen/temporal/api/enums/v1/query_pb.rb
@@ -19,7 +19,7 @@
end
end
-module Temporal
+module Temporalio
module Api
module Enums
module V1
diff --git a/lib/gen/temporal/api/enums/v1/reset_pb.rb b/lib/gen/temporal/api/enums/v1/reset_pb.rb
new file mode 100644
index 00000000..6eb81b8d
--- /dev/null
+++ b/lib/gen/temporal/api/enums/v1/reset_pb.rb
@@ -0,0 +1,24 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: temporal/api/enums/v1/reset.proto
+
+require 'google/protobuf'
+
+Google::Protobuf::DescriptorPool.generated_pool.build do
+ add_file("temporal/api/enums/v1/reset.proto", :syntax => :proto3) do
+ add_enum "temporal.api.enums.v1.ResetReapplyType" do
+ value :RESET_REAPPLY_TYPE_UNSPECIFIED, 0
+ value :RESET_REAPPLY_TYPE_SIGNAL, 1
+ value :RESET_REAPPLY_TYPE_NONE, 2
+ end
+ end
+end
+
+module Temporalio
+ module Api
+ module Enums
+ module V1
+ ResetReapplyType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.ResetReapplyType").enummodule
+ end
+ end
+ end
+end
diff --git a/lib/gen/temporal/api/enums/v1/schedule_pb.rb b/lib/gen/temporal/api/enums/v1/schedule_pb.rb
new file mode 100644
index 00000000..14d7b311
--- /dev/null
+++ b/lib/gen/temporal/api/enums/v1/schedule_pb.rb
@@ -0,0 +1,28 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: temporal/api/enums/v1/schedule.proto
+
+require 'google/protobuf'
+
+Google::Protobuf::DescriptorPool.generated_pool.build do
+ add_file("temporal/api/enums/v1/schedule.proto", :syntax => :proto3) do
+ add_enum "temporal.api.enums.v1.ScheduleOverlapPolicy" do
+ value :SCHEDULE_OVERLAP_POLICY_UNSPECIFIED, 0
+ value :SCHEDULE_OVERLAP_POLICY_SKIP, 1
+ value :SCHEDULE_OVERLAP_POLICY_BUFFER_ONE, 2
+ value :SCHEDULE_OVERLAP_POLICY_BUFFER_ALL, 3
+ value :SCHEDULE_OVERLAP_POLICY_CANCEL_OTHER, 4
+ value :SCHEDULE_OVERLAP_POLICY_TERMINATE_OTHER, 5
+ value :SCHEDULE_OVERLAP_POLICY_ALLOW_ALL, 6
+ end
+ end
+end
+
+module Temporalio
+ module Api
+ module Enums
+ module V1
+ ScheduleOverlapPolicy = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.ScheduleOverlapPolicy").enummodule
+ end
+ end
+ end
+end
diff --git a/lib/gen/temporal/api/enums/v1/task_queue_pb.rb b/lib/gen/temporal/api/enums/v1/task_queue_pb.rb
index bc0a59f6..53d99653 100644
--- a/lib/gen/temporal/api/enums/v1/task_queue_pb.rb
+++ b/lib/gen/temporal/api/enums/v1/task_queue_pb.rb
@@ -18,7 +18,7 @@
end
end
-module Temporal
+module Temporalio
module Api
module Enums
module V1
diff --git a/lib/gen/temporal/api/enums/v1/update_pb.rb b/lib/gen/temporal/api/enums/v1/update_pb.rb
new file mode 100644
index 00000000..205d140a
--- /dev/null
+++ b/lib/gen/temporal/api/enums/v1/update_pb.rb
@@ -0,0 +1,25 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: temporal/api/enums/v1/update.proto
+
+require 'google/protobuf'
+
+Google::Protobuf::DescriptorPool.generated_pool.build do
+ add_file("temporal/api/enums/v1/update.proto", :syntax => :proto3) do
+ add_enum "temporal.api.enums.v1.UpdateWorkflowExecutionLifecycleStage" do
+ value :UPDATE_WORKFLOW_EXECUTION_LIFECYCLE_STAGE_UNSPECIFIED, 0
+ value :UPDATE_WORKFLOW_EXECUTION_LIFECYCLE_STAGE_ADMITTED, 1
+ value :UPDATE_WORKFLOW_EXECUTION_LIFECYCLE_STAGE_ACCEPTED, 2
+ value :UPDATE_WORKFLOW_EXECUTION_LIFECYCLE_STAGE_COMPLETED, 3
+ end
+ end
+end
+
+module Temporalio
+ module Api
+ module Enums
+ module V1
+ UpdateWorkflowExecutionLifecycleStage = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.UpdateWorkflowExecutionLifecycleStage").enummodule
+ end
+ end
+ end
+end
diff --git a/lib/gen/temporal/api/enums/v1/workflow_pb.rb b/lib/gen/temporal/api/enums/v1/workflow_pb.rb
index e65f3cd9..e640dc9c 100644
--- a/lib/gen/temporal/api/enums/v1/workflow_pb.rb
+++ b/lib/gen/temporal/api/enums/v1/workflow_pb.rb
@@ -10,6 +10,7 @@
value :WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE, 1
value :WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE_FAILED_ONLY, 2
value :WORKFLOW_ID_REUSE_POLICY_REJECT_DUPLICATE, 3
+ value :WORKFLOW_ID_REUSE_POLICY_TERMINATE_IF_RUNNING, 4
end
add_enum "temporal.api.enums.v1.ParentClosePolicy" do
value :PARENT_CLOSE_POLICY_UNSPECIFIED, 0
@@ -39,6 +40,11 @@
value :PENDING_ACTIVITY_STATE_STARTED, 2
value :PENDING_ACTIVITY_STATE_CANCEL_REQUESTED, 3
end
+ add_enum "temporal.api.enums.v1.PendingWorkflowTaskState" do
+ value :PENDING_WORKFLOW_TASK_STATE_UNSPECIFIED, 0
+ value :PENDING_WORKFLOW_TASK_STATE_SCHEDULED, 1
+ value :PENDING_WORKFLOW_TASK_STATE_STARTED, 2
+ end
add_enum "temporal.api.enums.v1.HistoryEventFilterType" do
value :HISTORY_EVENT_FILTER_TYPE_UNSPECIFIED, 0
value :HISTORY_EVENT_FILTER_TYPE_ALL_EVENT, 1
@@ -64,7 +70,7 @@
end
end
-module Temporal
+module Temporalio
module Api
module Enums
module V1
@@ -73,6 +79,7 @@ module V1
ContinueAsNewInitiator = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.ContinueAsNewInitiator").enummodule
WorkflowExecutionStatus = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.WorkflowExecutionStatus").enummodule
PendingActivityState = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.PendingActivityState").enummodule
+ PendingWorkflowTaskState = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.PendingWorkflowTaskState").enummodule
HistoryEventFilterType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.HistoryEventFilterType").enummodule
RetryState = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.RetryState").enummodule
TimeoutType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.enums.v1.TimeoutType").enummodule
diff --git a/lib/gen/temporal/api/errordetails/v1/message_pb.rb b/lib/gen/temporal/api/errordetails/v1/message_pb.rb
index 64cf7725..87c68595 100644
--- a/lib/gen/temporal/api/errordetails/v1/message_pb.rb
+++ b/lib/gen/temporal/api/errordetails/v1/message_pb.rb
@@ -3,6 +3,10 @@
require 'google/protobuf'
+require 'temporal/api/common/v1/message_pb'
+require 'temporal/api/enums/v1/failed_cause_pb'
+require 'temporal/api/enums/v1/namespace_pb'
+
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("temporal/api/errordetails/v1/message.proto", :syntax => :proto3) do
add_message "temporal.api.errordetails.v1.NotFoundFailure" do
@@ -18,6 +22,16 @@
optional :current_cluster, :string, 2
optional :active_cluster, :string, 3
end
+ add_message "temporal.api.errordetails.v1.NamespaceInvalidStateFailure" do
+ optional :namespace, :string, 1
+ optional :state, :enum, 2, "temporal.api.enums.v1.NamespaceState"
+ repeated :allowed_states, :enum, 3, "temporal.api.enums.v1.NamespaceState"
+ end
+ add_message "temporal.api.errordetails.v1.NamespaceNotFoundFailure" do
+ optional :namespace, :string, 1
+ end
+ add_message "temporal.api.errordetails.v1.NamespaceAlreadyExistsFailure" do
+ end
add_message "temporal.api.errordetails.v1.ClientVersionNotSupportedFailure" do
optional :client_version, :string, 1
optional :client_name, :string, 2
@@ -27,27 +41,43 @@
optional :server_version, :string, 1
optional :client_supported_server_versions, :string, 2
end
- add_message "temporal.api.errordetails.v1.NamespaceAlreadyExistsFailure" do
- end
add_message "temporal.api.errordetails.v1.CancellationAlreadyRequestedFailure" do
end
add_message "temporal.api.errordetails.v1.QueryFailedFailure" do
end
+ add_message "temporal.api.errordetails.v1.PermissionDeniedFailure" do
+ optional :reason, :string, 1
+ end
+ add_message "temporal.api.errordetails.v1.ResourceExhaustedFailure" do
+ optional :cause, :enum, 1, "temporal.api.enums.v1.ResourceExhaustedCause"
+ end
+ add_message "temporal.api.errordetails.v1.SystemWorkflowFailure" do
+ optional :workflow_execution, :message, 1, "temporal.api.common.v1.WorkflowExecution"
+ optional :workflow_error, :string, 2
+ end
+ add_message "temporal.api.errordetails.v1.WorkflowNotReadyFailure" do
+ end
end
end
-module Temporal
+module Temporalio
module Api
module ErrorDetails
module V1
NotFoundFailure = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.errordetails.v1.NotFoundFailure").msgclass
WorkflowExecutionAlreadyStartedFailure = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.errordetails.v1.WorkflowExecutionAlreadyStartedFailure").msgclass
NamespaceNotActiveFailure = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.errordetails.v1.NamespaceNotActiveFailure").msgclass
+ NamespaceInvalidStateFailure = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.errordetails.v1.NamespaceInvalidStateFailure").msgclass
+ NamespaceNotFoundFailure = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.errordetails.v1.NamespaceNotFoundFailure").msgclass
+ NamespaceAlreadyExistsFailure = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.errordetails.v1.NamespaceAlreadyExistsFailure").msgclass
ClientVersionNotSupportedFailure = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.errordetails.v1.ClientVersionNotSupportedFailure").msgclass
ServerVersionNotSupportedFailure = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.errordetails.v1.ServerVersionNotSupportedFailure").msgclass
- NamespaceAlreadyExistsFailure = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.errordetails.v1.NamespaceAlreadyExistsFailure").msgclass
CancellationAlreadyRequestedFailure = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.errordetails.v1.CancellationAlreadyRequestedFailure").msgclass
QueryFailedFailure = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.errordetails.v1.QueryFailedFailure").msgclass
+ PermissionDeniedFailure = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.errordetails.v1.PermissionDeniedFailure").msgclass
+ ResourceExhaustedFailure = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.errordetails.v1.ResourceExhaustedFailure").msgclass
+ SystemWorkflowFailure = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.errordetails.v1.SystemWorkflowFailure").msgclass
+ WorkflowNotReadyFailure = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.errordetails.v1.WorkflowNotReadyFailure").msgclass
end
end
end
diff --git a/lib/gen/temporal/api/failure/v1/message_pb.rb b/lib/gen/temporal/api/failure/v1/message_pb.rb
index 757b4693..041653ff 100644
--- a/lib/gen/temporal/api/failure/v1/message_pb.rb
+++ b/lib/gen/temporal/api/failure/v1/message_pb.rb
@@ -5,6 +5,7 @@
require 'temporal/api/common/v1/message_pb'
require 'temporal/api/enums/v1/workflow_pb'
+
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("temporal/api/failure/v1/message.proto", :syntax => :proto3) do
add_message "temporal.api.failure.v1.ApplicationFailureInfo" do
@@ -47,6 +48,7 @@
optional :message, :string, 1
optional :source, :string, 2
optional :stack_trace, :string, 3
+ optional :encoded_attributes, :message, 20, "temporal.api.common.v1.Payload"
optional :cause, :message, 4, "temporal.api.failure.v1.Failure"
oneof :failure_info do
optional :application_failure_info, :message, 5, "temporal.api.failure.v1.ApplicationFailureInfo"
@@ -62,7 +64,7 @@
end
end
-module Temporal
+module Temporalio
module Api
module Failure
module V1
diff --git a/lib/gen/temporal/api/filter/v1/message_pb.rb b/lib/gen/temporal/api/filter/v1/message_pb.rb
index 5a440018..b38072ce 100644
--- a/lib/gen/temporal/api/filter/v1/message_pb.rb
+++ b/lib/gen/temporal/api/filter/v1/message_pb.rb
@@ -4,7 +4,9 @@
require 'google/protobuf'
require 'google/protobuf/timestamp_pb'
+require 'dependencies/gogoproto/gogo_pb'
require 'temporal/api/enums/v1/workflow_pb'
+
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("temporal/api/filter/v1/message.proto", :syntax => :proto3) do
add_message "temporal.api.filter.v1.WorkflowExecutionFilter" do
@@ -24,7 +26,7 @@
end
end
-module Temporal
+module Temporalio
module Api
module Filter
module V1
diff --git a/lib/gen/temporal/api/history/v1/message_pb.rb b/lib/gen/temporal/api/history/v1/message_pb.rb
index bc3586ae..2b0043ce 100644
--- a/lib/gen/temporal/api/history/v1/message_pb.rb
+++ b/lib/gen/temporal/api/history/v1/message_pb.rb
@@ -5,18 +5,23 @@
require 'google/protobuf/duration_pb'
require 'google/protobuf/timestamp_pb'
+require 'dependencies/gogoproto/gogo_pb'
require 'temporal/api/enums/v1/event_type_pb'
require 'temporal/api/enums/v1/failed_cause_pb'
require 'temporal/api/enums/v1/workflow_pb'
require 'temporal/api/common/v1/message_pb'
require 'temporal/api/failure/v1/message_pb'
-require 'temporal/api/workflow/v1/message_pb'
require 'temporal/api/taskqueue/v1/message_pb'
+require 'temporal/api/update/v1/message_pb'
+require 'temporal/api/workflow/v1/message_pb'
+require 'temporal/api/sdk/v1/task_complete_metadata_pb'
+
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("temporal/api/history/v1/message.proto", :syntax => :proto3) do
add_message "temporal.api.history.v1.WorkflowExecutionStartedEventAttributes" do
optional :workflow_type, :message, 1, "temporal.api.common.v1.WorkflowType"
optional :parent_workflow_namespace, :string, 2
+ optional :parent_workflow_namespace_id, :string, 27
optional :parent_workflow_execution, :message, 3, "temporal.api.common.v1.WorkflowExecution"
optional :parent_initiated_event_id, :int64, 4
optional :task_queue, :message, 5, "temporal.api.taskqueue.v1.TaskQueue"
@@ -40,18 +45,22 @@
optional :search_attributes, :message, 23, "temporal.api.common.v1.SearchAttributes"
optional :prev_auto_reset_points, :message, 24, "temporal.api.workflow.v1.ResetPoints"
optional :header, :message, 25, "temporal.api.common.v1.Header"
+ optional :parent_initiated_event_version, :int64, 26
end
add_message "temporal.api.history.v1.WorkflowExecutionCompletedEventAttributes" do
optional :result, :message, 1, "temporal.api.common.v1.Payloads"
optional :workflow_task_completed_event_id, :int64, 2
+ optional :new_execution_run_id, :string, 3
end
add_message "temporal.api.history.v1.WorkflowExecutionFailedEventAttributes" do
optional :failure, :message, 1, "temporal.api.failure.v1.Failure"
optional :retry_state, :enum, 2, "temporal.api.enums.v1.RetryState"
optional :workflow_task_completed_event_id, :int64, 3
+ optional :new_execution_run_id, :string, 4
end
add_message "temporal.api.history.v1.WorkflowExecutionTimedOutEventAttributes" do
optional :retry_state, :enum, 1, "temporal.api.enums.v1.RetryState"
+ optional :new_execution_run_id, :string, 2
end
add_message "temporal.api.history.v1.WorkflowExecutionContinuedAsNewEventAttributes" do
optional :new_execution_run_id, :string, 1
@@ -78,12 +87,17 @@
optional :scheduled_event_id, :int64, 1
optional :identity, :string, 2
optional :request_id, :string, 3
+ optional :suggest_continue_as_new, :bool, 4
+ optional :history_size_bytes, :int64, 5
end
add_message "temporal.api.history.v1.WorkflowTaskCompletedEventAttributes" do
optional :scheduled_event_id, :int64, 1
optional :started_event_id, :int64, 2
optional :identity, :string, 3
optional :binary_checksum, :string, 4
+ optional :worker_version, :message, 5, "temporal.api.common.v1.WorkerVersionStamp"
+ optional :sdk_metadata, :message, 6, "temporal.api.sdk.v1.WorkflowTaskCompletedMetadata"
+ optional :metering_metadata, :message, 13, "temporal.api.common.v1.MeteringMetadata"
end
add_message "temporal.api.history.v1.WorkflowTaskTimedOutEventAttributes" do
optional :scheduled_event_id, :int64, 1
@@ -104,7 +118,6 @@
add_message "temporal.api.history.v1.ActivityTaskScheduledEventAttributes" do
optional :activity_id, :string, 1
optional :activity_type, :message, 2, "temporal.api.common.v1.ActivityType"
- optional :namespace, :string, 3
optional :task_queue, :message, 4, "temporal.api.taskqueue.v1.TaskQueue"
optional :header, :message, 5, "temporal.api.common.v1.Header"
optional :input, :message, 6, "temporal.api.common.v1.Payloads"
@@ -188,6 +201,8 @@
optional :signal_name, :string, 1
optional :input, :message, 2, "temporal.api.common.v1.Payloads"
optional :identity, :string, 3
+ optional :header, :message, 4, "temporal.api.common.v1.Header"
+ optional :skip_generate_workflow_task, :bool, 5
end
add_message "temporal.api.history.v1.WorkflowExecutionTerminatedEventAttributes" do
optional :reason, :string, 1
@@ -197,14 +212,17 @@
add_message "temporal.api.history.v1.RequestCancelExternalWorkflowExecutionInitiatedEventAttributes" do
optional :workflow_task_completed_event_id, :int64, 1
optional :namespace, :string, 2
+ optional :namespace_id, :string, 7
optional :workflow_execution, :message, 3, "temporal.api.common.v1.WorkflowExecution"
optional :control, :string, 4
optional :child_workflow_only, :bool, 5
+ optional :reason, :string, 6
end
add_message "temporal.api.history.v1.RequestCancelExternalWorkflowExecutionFailedEventAttributes" do
optional :cause, :enum, 1, "temporal.api.enums.v1.CancelExternalWorkflowExecutionFailedCause"
optional :workflow_task_completed_event_id, :int64, 2
optional :namespace, :string, 3
+ optional :namespace_id, :string, 7
optional :workflow_execution, :message, 4, "temporal.api.common.v1.WorkflowExecution"
optional :initiated_event_id, :int64, 5
optional :control, :string, 6
@@ -212,21 +230,25 @@
add_message "temporal.api.history.v1.ExternalWorkflowExecutionCancelRequestedEventAttributes" do
optional :initiated_event_id, :int64, 1
optional :namespace, :string, 2
+ optional :namespace_id, :string, 4
optional :workflow_execution, :message, 3, "temporal.api.common.v1.WorkflowExecution"
end
add_message "temporal.api.history.v1.SignalExternalWorkflowExecutionInitiatedEventAttributes" do
optional :workflow_task_completed_event_id, :int64, 1
optional :namespace, :string, 2
+ optional :namespace_id, :string, 9
optional :workflow_execution, :message, 3, "temporal.api.common.v1.WorkflowExecution"
optional :signal_name, :string, 4
optional :input, :message, 5, "temporal.api.common.v1.Payloads"
optional :control, :string, 6
optional :child_workflow_only, :bool, 7
+ optional :header, :message, 8, "temporal.api.common.v1.Header"
end
add_message "temporal.api.history.v1.SignalExternalWorkflowExecutionFailedEventAttributes" do
optional :cause, :enum, 1, "temporal.api.enums.v1.SignalExternalWorkflowExecutionFailedCause"
optional :workflow_task_completed_event_id, :int64, 2
optional :namespace, :string, 3
+ optional :namespace_id, :string, 7
optional :workflow_execution, :message, 4, "temporal.api.common.v1.WorkflowExecution"
optional :initiated_event_id, :int64, 5
optional :control, :string, 6
@@ -234,6 +256,7 @@
add_message "temporal.api.history.v1.ExternalWorkflowExecutionSignaledEventAttributes" do
optional :initiated_event_id, :int64, 1
optional :namespace, :string, 2
+ optional :namespace_id, :string, 5
optional :workflow_execution, :message, 3, "temporal.api.common.v1.WorkflowExecution"
optional :control, :string, 4
end
@@ -241,8 +264,13 @@
optional :workflow_task_completed_event_id, :int64, 1
optional :search_attributes, :message, 2, "temporal.api.common.v1.SearchAttributes"
end
+ add_message "temporal.api.history.v1.WorkflowPropertiesModifiedEventAttributes" do
+ optional :workflow_task_completed_event_id, :int64, 1
+ optional :upserted_memo, :message, 2, "temporal.api.common.v1.Memo"
+ end
add_message "temporal.api.history.v1.StartChildWorkflowExecutionInitiatedEventAttributes" do
optional :namespace, :string, 1
+ optional :namespace_id, :string, 18
optional :workflow_id, :string, 2
optional :workflow_type, :message, 3, "temporal.api.common.v1.WorkflowType"
optional :task_queue, :message, 4, "temporal.api.taskqueue.v1.TaskQueue"
@@ -262,6 +290,7 @@
end
add_message "temporal.api.history.v1.StartChildWorkflowExecutionFailedEventAttributes" do
optional :namespace, :string, 1
+ optional :namespace_id, :string, 8
optional :workflow_id, :string, 2
optional :workflow_type, :message, 3, "temporal.api.common.v1.WorkflowType"
optional :cause, :enum, 4, "temporal.api.enums.v1.StartChildWorkflowExecutionFailedCause"
@@ -271,6 +300,7 @@
end
add_message "temporal.api.history.v1.ChildWorkflowExecutionStartedEventAttributes" do
optional :namespace, :string, 1
+ optional :namespace_id, :string, 6
optional :initiated_event_id, :int64, 2
optional :workflow_execution, :message, 3, "temporal.api.common.v1.WorkflowExecution"
optional :workflow_type, :message, 4, "temporal.api.common.v1.WorkflowType"
@@ -279,6 +309,7 @@
add_message "temporal.api.history.v1.ChildWorkflowExecutionCompletedEventAttributes" do
optional :result, :message, 1, "temporal.api.common.v1.Payloads"
optional :namespace, :string, 2
+ optional :namespace_id, :string, 7
optional :workflow_execution, :message, 3, "temporal.api.common.v1.WorkflowExecution"
optional :workflow_type, :message, 4, "temporal.api.common.v1.WorkflowType"
optional :initiated_event_id, :int64, 5
@@ -287,6 +318,7 @@
add_message "temporal.api.history.v1.ChildWorkflowExecutionFailedEventAttributes" do
optional :failure, :message, 1, "temporal.api.failure.v1.Failure"
optional :namespace, :string, 2
+ optional :namespace_id, :string, 8
optional :workflow_execution, :message, 3, "temporal.api.common.v1.WorkflowExecution"
optional :workflow_type, :message, 4, "temporal.api.common.v1.WorkflowType"
optional :initiated_event_id, :int64, 5
@@ -296,6 +328,7 @@
add_message "temporal.api.history.v1.ChildWorkflowExecutionCanceledEventAttributes" do
optional :details, :message, 1, "temporal.api.common.v1.Payloads"
optional :namespace, :string, 2
+ optional :namespace_id, :string, 7
optional :workflow_execution, :message, 3, "temporal.api.common.v1.WorkflowExecution"
optional :workflow_type, :message, 4, "temporal.api.common.v1.WorkflowType"
optional :initiated_event_id, :int64, 5
@@ -303,6 +336,7 @@
end
add_message "temporal.api.history.v1.ChildWorkflowExecutionTimedOutEventAttributes" do
optional :namespace, :string, 1
+ optional :namespace_id, :string, 7
optional :workflow_execution, :message, 2, "temporal.api.common.v1.WorkflowExecution"
optional :workflow_type, :message, 3, "temporal.api.common.v1.WorkflowType"
optional :initiated_event_id, :int64, 4
@@ -311,17 +345,47 @@
end
add_message "temporal.api.history.v1.ChildWorkflowExecutionTerminatedEventAttributes" do
optional :namespace, :string, 1
+ optional :namespace_id, :string, 6
optional :workflow_execution, :message, 2, "temporal.api.common.v1.WorkflowExecution"
optional :workflow_type, :message, 3, "temporal.api.common.v1.WorkflowType"
optional :initiated_event_id, :int64, 4
optional :started_event_id, :int64, 5
end
+ add_message "temporal.api.history.v1.WorkflowPropertiesModifiedExternallyEventAttributes" do
+ optional :new_task_queue, :string, 1
+ optional :new_workflow_task_timeout, :message, 2, "google.protobuf.Duration"
+ optional :new_workflow_run_timeout, :message, 3, "google.protobuf.Duration"
+ optional :new_workflow_execution_timeout, :message, 4, "google.protobuf.Duration"
+ optional :upserted_memo, :message, 5, "temporal.api.common.v1.Memo"
+ end
+ add_message "temporal.api.history.v1.ActivityPropertiesModifiedExternallyEventAttributes" do
+ optional :scheduled_event_id, :int64, 1
+ optional :new_retry_policy, :message, 2, "temporal.api.common.v1.RetryPolicy"
+ end
+ add_message "temporal.api.history.v1.WorkflowExecutionUpdateAcceptedEventAttributes" do
+ optional :protocol_instance_id, :string, 1
+ optional :accepted_request_message_id, :string, 2
+ optional :accepted_request_sequencing_event_id, :int64, 3
+ optional :accepted_request, :message, 4, "temporal.api.update.v1.Request"
+ end
+ add_message "temporal.api.history.v1.WorkflowExecutionUpdateCompletedEventAttributes" do
+ optional :meta, :message, 1, "temporal.api.update.v1.Meta"
+ optional :outcome, :message, 2, "temporal.api.update.v1.Outcome"
+ end
+ add_message "temporal.api.history.v1.WorkflowExecutionUpdateRejectedEventAttributes" do
+ optional :protocol_instance_id, :string, 1
+ optional :rejected_request_message_id, :string, 2
+ optional :rejected_request_sequencing_event_id, :int64, 3
+ optional :rejected_request, :message, 4, "temporal.api.update.v1.Request"
+ optional :failure, :message, 5, "temporal.api.failure.v1.Failure"
+ end
add_message "temporal.api.history.v1.HistoryEvent" do
optional :event_id, :int64, 1
optional :event_time, :message, 2, "google.protobuf.Timestamp"
optional :event_type, :enum, 3, "temporal.api.enums.v1.EventType"
optional :version, :int64, 4
optional :task_id, :int64, 5
+ optional :worker_may_ignore, :bool, 300
oneof :attributes do
optional :workflow_execution_started_event_attributes, :message, 6, "temporal.api.history.v1.WorkflowExecutionStartedEventAttributes"
optional :workflow_execution_completed_event_attributes, :message, 7, "temporal.api.history.v1.WorkflowExecutionCompletedEventAttributes"
@@ -363,6 +427,12 @@
optional :signal_external_workflow_execution_failed_event_attributes, :message, 43, "temporal.api.history.v1.SignalExternalWorkflowExecutionFailedEventAttributes"
optional :external_workflow_execution_signaled_event_attributes, :message, 44, "temporal.api.history.v1.ExternalWorkflowExecutionSignaledEventAttributes"
optional :upsert_workflow_search_attributes_event_attributes, :message, 45, "temporal.api.history.v1.UpsertWorkflowSearchAttributesEventAttributes"
+ optional :workflow_execution_update_accepted_event_attributes, :message, 46, "temporal.api.history.v1.WorkflowExecutionUpdateAcceptedEventAttributes"
+ optional :workflow_execution_update_rejected_event_attributes, :message, 47, "temporal.api.history.v1.WorkflowExecutionUpdateRejectedEventAttributes"
+ optional :workflow_execution_update_completed_event_attributes, :message, 48, "temporal.api.history.v1.WorkflowExecutionUpdateCompletedEventAttributes"
+ optional :workflow_properties_modified_externally_event_attributes, :message, 49, "temporal.api.history.v1.WorkflowPropertiesModifiedExternallyEventAttributes"
+ optional :activity_properties_modified_externally_event_attributes, :message, 50, "temporal.api.history.v1.ActivityPropertiesModifiedExternallyEventAttributes"
+ optional :workflow_properties_modified_event_attributes, :message, 51, "temporal.api.history.v1.WorkflowPropertiesModifiedEventAttributes"
end
end
add_message "temporal.api.history.v1.History" do
@@ -371,7 +441,7 @@
end
end
-module Temporal
+module Temporalio
module Api
module History
module V1
@@ -407,6 +477,7 @@ module V1
SignalExternalWorkflowExecutionFailedEventAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.SignalExternalWorkflowExecutionFailedEventAttributes").msgclass
ExternalWorkflowExecutionSignaledEventAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.ExternalWorkflowExecutionSignaledEventAttributes").msgclass
UpsertWorkflowSearchAttributesEventAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.UpsertWorkflowSearchAttributesEventAttributes").msgclass
+ WorkflowPropertiesModifiedEventAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.WorkflowPropertiesModifiedEventAttributes").msgclass
StartChildWorkflowExecutionInitiatedEventAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.StartChildWorkflowExecutionInitiatedEventAttributes").msgclass
StartChildWorkflowExecutionFailedEventAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.StartChildWorkflowExecutionFailedEventAttributes").msgclass
ChildWorkflowExecutionStartedEventAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.ChildWorkflowExecutionStartedEventAttributes").msgclass
@@ -415,6 +486,11 @@ module V1
ChildWorkflowExecutionCanceledEventAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.ChildWorkflowExecutionCanceledEventAttributes").msgclass
ChildWorkflowExecutionTimedOutEventAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.ChildWorkflowExecutionTimedOutEventAttributes").msgclass
ChildWorkflowExecutionTerminatedEventAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.ChildWorkflowExecutionTerminatedEventAttributes").msgclass
+ WorkflowPropertiesModifiedExternallyEventAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.WorkflowPropertiesModifiedExternallyEventAttributes").msgclass
+ ActivityPropertiesModifiedExternallyEventAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.ActivityPropertiesModifiedExternallyEventAttributes").msgclass
+ WorkflowExecutionUpdateAcceptedEventAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.WorkflowExecutionUpdateAcceptedEventAttributes").msgclass
+ WorkflowExecutionUpdateCompletedEventAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.WorkflowExecutionUpdateCompletedEventAttributes").msgclass
+ WorkflowExecutionUpdateRejectedEventAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.WorkflowExecutionUpdateRejectedEventAttributes").msgclass
HistoryEvent = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.HistoryEvent").msgclass
History = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.history.v1.History").msgclass
end
diff --git a/lib/gen/temporal/api/namespace/v1/message_pb.rb b/lib/gen/temporal/api/namespace/v1/message_pb.rb
index cea6262d..3042febe 100644
--- a/lib/gen/temporal/api/namespace/v1/message_pb.rb
+++ b/lib/gen/temporal/api/namespace/v1/message_pb.rb
@@ -5,7 +5,9 @@
require 'google/protobuf/duration_pb'
require 'google/protobuf/timestamp_pb'
+require 'dependencies/gogoproto/gogo_pb'
require 'temporal/api/enums/v1/namespace_pb'
+
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("temporal/api/namespace/v1/message.proto", :syntax => :proto3) do
add_message "temporal.api.namespace.v1.NamespaceInfo" do
@@ -15,6 +17,7 @@
optional :owner_email, :string, 4
map :data, :string, :string, 5
optional :id, :string, 6
+ optional :supports_schedules, :bool, 100
end
add_message "temporal.api.namespace.v1.NamespaceConfig" do
optional :workflow_execution_retention_ttl, :message, 1, "google.protobuf.Duration"
@@ -23,6 +26,7 @@
optional :history_archival_uri, :string, 4
optional :visibility_archival_state, :enum, 5, "temporal.api.enums.v1.ArchivalState"
optional :visibility_archival_uri, :string, 6
+ map :custom_search_attribute_aliases, :string, :string, 7
end
add_message "temporal.api.namespace.v1.BadBinaries" do
map :binaries, :string, :message, 1, "temporal.api.namespace.v1.BadBinaryInfo"
@@ -36,11 +40,15 @@
optional :description, :string, 1
optional :owner_email, :string, 2
map :data, :string, :string, 3
+ optional :state, :enum, 4, "temporal.api.enums.v1.NamespaceState"
+ end
+ add_message "temporal.api.namespace.v1.NamespaceFilter" do
+ optional :include_deleted, :bool, 1
end
end
end
-module Temporal
+module Temporalio
module Api
module Namespace
module V1
@@ -49,6 +57,7 @@ module V1
BadBinaries = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.namespace.v1.BadBinaries").msgclass
BadBinaryInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.namespace.v1.BadBinaryInfo").msgclass
UpdateNamespaceInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.namespace.v1.UpdateNamespaceInfo").msgclass
+ NamespaceFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.namespace.v1.NamespaceFilter").msgclass
end
end
end
diff --git a/lib/gen/temporal/api/operatorservice/v1/request_response_pb.rb b/lib/gen/temporal/api/operatorservice/v1/request_response_pb.rb
new file mode 100644
index 00000000..f85cb8ee
--- /dev/null
+++ b/lib/gen/temporal/api/operatorservice/v1/request_response_pb.rb
@@ -0,0 +1,88 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: temporal/api/operatorservice/v1/request_response.proto
+
+require 'google/protobuf'
+
+require 'temporal/api/enums/v1/common_pb'
+
+Google::Protobuf::DescriptorPool.generated_pool.build do
+ add_file("temporal/api/operatorservice/v1/request_response.proto", :syntax => :proto3) do
+ add_message "temporal.api.operatorservice.v1.AddSearchAttributesRequest" do
+ map :search_attributes, :string, :enum, 1, "temporal.api.enums.v1.IndexedValueType"
+ optional :namespace, :string, 2
+ end
+ add_message "temporal.api.operatorservice.v1.AddSearchAttributesResponse" do
+ end
+ add_message "temporal.api.operatorservice.v1.RemoveSearchAttributesRequest" do
+ repeated :search_attributes, :string, 1
+ optional :namespace, :string, 2
+ end
+ add_message "temporal.api.operatorservice.v1.RemoveSearchAttributesResponse" do
+ end
+ add_message "temporal.api.operatorservice.v1.ListSearchAttributesRequest" do
+ optional :namespace, :string, 1
+ end
+ add_message "temporal.api.operatorservice.v1.ListSearchAttributesResponse" do
+ map :custom_attributes, :string, :enum, 1, "temporal.api.enums.v1.IndexedValueType"
+ map :system_attributes, :string, :enum, 2, "temporal.api.enums.v1.IndexedValueType"
+ map :storage_schema, :string, :string, 3
+ end
+ add_message "temporal.api.operatorservice.v1.DeleteNamespaceRequest" do
+ optional :namespace, :string, 1
+ end
+ add_message "temporal.api.operatorservice.v1.DeleteNamespaceResponse" do
+ optional :deleted_namespace, :string, 1
+ end
+ add_message "temporal.api.operatorservice.v1.AddOrUpdateRemoteClusterRequest" do
+ optional :frontend_address, :string, 1
+ optional :enable_remote_cluster_connection, :bool, 2
+ end
+ add_message "temporal.api.operatorservice.v1.AddOrUpdateRemoteClusterResponse" do
+ end
+ add_message "temporal.api.operatorservice.v1.RemoveRemoteClusterRequest" do
+ optional :cluster_name, :string, 1
+ end
+ add_message "temporal.api.operatorservice.v1.RemoveRemoteClusterResponse" do
+ end
+ add_message "temporal.api.operatorservice.v1.ListClustersRequest" do
+ optional :page_size, :int32, 1
+ optional :next_page_token, :bytes, 2
+ end
+ add_message "temporal.api.operatorservice.v1.ListClustersResponse" do
+ repeated :clusters, :message, 1, "temporal.api.operatorservice.v1.ClusterMetadata"
+ optional :next_page_token, :bytes, 4
+ end
+ add_message "temporal.api.operatorservice.v1.ClusterMetadata" do
+ optional :cluster_name, :string, 1
+ optional :cluster_id, :string, 2
+ optional :address, :string, 3
+ optional :initial_failover_version, :int64, 4
+ optional :history_shard_count, :int32, 5
+ optional :is_connection_enabled, :bool, 6
+ end
+ end
+end
+
+module Temporalio
+ module Api
+ module OperatorService
+ module V1
+ AddSearchAttributesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.operatorservice.v1.AddSearchAttributesRequest").msgclass
+ AddSearchAttributesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.operatorservice.v1.AddSearchAttributesResponse").msgclass
+ RemoveSearchAttributesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.operatorservice.v1.RemoveSearchAttributesRequest").msgclass
+ RemoveSearchAttributesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.operatorservice.v1.RemoveSearchAttributesResponse").msgclass
+ ListSearchAttributesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.operatorservice.v1.ListSearchAttributesRequest").msgclass
+ ListSearchAttributesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.operatorservice.v1.ListSearchAttributesResponse").msgclass
+ DeleteNamespaceRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.operatorservice.v1.DeleteNamespaceRequest").msgclass
+ DeleteNamespaceResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.operatorservice.v1.DeleteNamespaceResponse").msgclass
+ AddOrUpdateRemoteClusterRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.operatorservice.v1.AddOrUpdateRemoteClusterRequest").msgclass
+ AddOrUpdateRemoteClusterResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.operatorservice.v1.AddOrUpdateRemoteClusterResponse").msgclass
+ RemoveRemoteClusterRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.operatorservice.v1.RemoveRemoteClusterRequest").msgclass
+ RemoveRemoteClusterResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.operatorservice.v1.RemoveRemoteClusterResponse").msgclass
+ ListClustersRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.operatorservice.v1.ListClustersRequest").msgclass
+ ListClustersResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.operatorservice.v1.ListClustersResponse").msgclass
+ ClusterMetadata = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.operatorservice.v1.ClusterMetadata").msgclass
+ end
+ end
+ end
+end
diff --git a/lib/gen/temporal/api/operatorservice/v1/service_pb.rb b/lib/gen/temporal/api/operatorservice/v1/service_pb.rb
new file mode 100644
index 00000000..515e36b1
--- /dev/null
+++ b/lib/gen/temporal/api/operatorservice/v1/service_pb.rb
@@ -0,0 +1,20 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: temporal/api/operatorservice/v1/service.proto
+
+require 'google/protobuf'
+
+require 'temporal/api/operatorservice/v1/request_response_pb'
+
+Google::Protobuf::DescriptorPool.generated_pool.build do
+ add_file("temporal/api/operatorservice/v1/service.proto", :syntax => :proto3) do
+ end
+end
+
+module Temporalio
+ module Api
+ module OperatorService
+ module V1
+ end
+ end
+ end
+end
diff --git a/lib/gen/temporal/api/operatorservice/v1/service_services_pb.rb b/lib/gen/temporal/api/operatorservice/v1/service_services_pb.rb
new file mode 100644
index 00000000..44a8c571
--- /dev/null
+++ b/lib/gen/temporal/api/operatorservice/v1/service_services_pb.rb
@@ -0,0 +1,78 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# Source: temporal/api/operatorservice/v1/service.proto for package 'Temporalio.Api.OperatorService.V1'
+# Original file comments:
+# The MIT License
+#
+# Copyright (c) 2020 Temporal Technologies Inc. All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+
+require 'grpc'
+require 'temporal/api/operatorservice/v1/service_pb'
+
+module Temporalio
+ module Api
+ module OperatorService
+ module V1
+ module OperatorService
+ # OperatorService API defines how Temporal SDKs and other clients interact with the Temporal server
+ # to perform administrative functions like registering a search attribute or a namespace.
+ # APIs in this file could be not compatible with Temporal Cloud, hence it's usage in SDKs should be limited by
+ # designated APIs that clearly state that they shouldn't be used by the main Application (Workflows & Activities) framework.
+ class Service
+
+ include ::GRPC::GenericService
+
+ self.marshal_class_method = :encode
+ self.unmarshal_class_method = :decode
+ self.service_name = 'temporal.api.operatorservice.v1.OperatorService'
+
+ # AddSearchAttributes add custom search attributes.
+ #
+ # Returns ALREADY_EXISTS status code if a Search Attribute with any of the specified names already exists
+ # Returns INTERNAL status code with temporal.api.errordetails.v1.SystemWorkflowFailure in Error Details if registration process fails,
+ rpc :AddSearchAttributes, ::Temporalio::Api::OperatorService::V1::AddSearchAttributesRequest, ::Temporalio::Api::OperatorService::V1::AddSearchAttributesResponse
+ # RemoveSearchAttributes removes custom search attributes.
+ #
+ # Returns NOT_FOUND status code if a Search Attribute with any of the specified names is not registered
+ rpc :RemoveSearchAttributes, ::Temporalio::Api::OperatorService::V1::RemoveSearchAttributesRequest, ::Temporalio::Api::OperatorService::V1::RemoveSearchAttributesResponse
+ # ListSearchAttributes returns comprehensive information about search attributes.
+ rpc :ListSearchAttributes, ::Temporalio::Api::OperatorService::V1::ListSearchAttributesRequest, ::Temporalio::Api::OperatorService::V1::ListSearchAttributesResponse
+ # DeleteNamespace synchronously deletes a namespace and asynchronously reclaims all namespace resources.
+ # (-- api-linter: core::0135::method-signature=disabled
+ # aip.dev/not-precedent: DeleteNamespace RPC doesn't follow Google API format. --)
+ # (-- api-linter: core::0135::response-message-name=disabled
+ # aip.dev/not-precedent: DeleteNamespace RPC doesn't follow Google API format. --)
+ rpc :DeleteNamespace, ::Temporalio::Api::OperatorService::V1::DeleteNamespaceRequest, ::Temporalio::Api::OperatorService::V1::DeleteNamespaceResponse
+ # AddOrUpdateRemoteCluster adds or updates remote cluster.
+ rpc :AddOrUpdateRemoteCluster, ::Temporalio::Api::OperatorService::V1::AddOrUpdateRemoteClusterRequest, ::Temporalio::Api::OperatorService::V1::AddOrUpdateRemoteClusterResponse
+ # RemoveRemoteCluster removes remote cluster.
+ rpc :RemoveRemoteCluster, ::Temporalio::Api::OperatorService::V1::RemoveRemoteClusterRequest, ::Temporalio::Api::OperatorService::V1::RemoveRemoteClusterResponse
+ # ListClusters returns information about Temporal clusters.
+ rpc :ListClusters, ::Temporalio::Api::OperatorService::V1::ListClustersRequest, ::Temporalio::Api::OperatorService::V1::ListClustersResponse
+ end
+
+ Stub = Service.rpc_stub_class
+ end
+ # (-- Search Attribute --)
+ end
+ end
+ end
+end
diff --git a/lib/gen/temporal/api/protocol/v1/message_pb.rb b/lib/gen/temporal/api/protocol/v1/message_pb.rb
new file mode 100644
index 00000000..577b0c11
--- /dev/null
+++ b/lib/gen/temporal/api/protocol/v1/message_pb.rb
@@ -0,0 +1,30 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: temporal/api/protocol/v1/message.proto
+
+require 'google/protobuf'
+
+require 'google/protobuf/any_pb'
+
+Google::Protobuf::DescriptorPool.generated_pool.build do
+ add_file("temporal/api/protocol/v1/message.proto", :syntax => :proto3) do
+ add_message "temporal.api.protocol.v1.Message" do
+ optional :id, :string, 1
+ optional :protocol_instance_id, :string, 2
+ optional :body, :message, 5, "google.protobuf.Any"
+ oneof :sequencing_id do
+ optional :event_id, :int64, 3
+ optional :command_index, :int64, 4
+ end
+ end
+ end
+end
+
+module Temporalio
+ module Api
+ module Protocol
+ module V1
+ Message = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.protocol.v1.Message").msgclass
+ end
+ end
+ end
+end
diff --git a/lib/gen/temporal/api/query/v1/message_pb.rb b/lib/gen/temporal/api/query/v1/message_pb.rb
index b3848ce5..652b77c1 100644
--- a/lib/gen/temporal/api/query/v1/message_pb.rb
+++ b/lib/gen/temporal/api/query/v1/message_pb.rb
@@ -6,11 +6,13 @@
require 'temporal/api/enums/v1/query_pb'
require 'temporal/api/enums/v1/workflow_pb'
require 'temporal/api/common/v1/message_pb'
+
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("temporal/api/query/v1/message.proto", :syntax => :proto3) do
add_message "temporal.api.query.v1.WorkflowQuery" do
optional :query_type, :string, 1
optional :query_args, :message, 2, "temporal.api.common.v1.Payloads"
+ optional :header, :message, 3, "temporal.api.common.v1.Header"
end
add_message "temporal.api.query.v1.WorkflowQueryResult" do
optional :result_type, :enum, 1, "temporal.api.enums.v1.QueryResultType"
@@ -23,7 +25,7 @@
end
end
-module Temporal
+module Temporalio
module Api
module Query
module V1
diff --git a/lib/gen/temporal/api/replication/v1/message_pb.rb b/lib/gen/temporal/api/replication/v1/message_pb.rb
index 6e964bc2..5336b7de 100644
--- a/lib/gen/temporal/api/replication/v1/message_pb.rb
+++ b/lib/gen/temporal/api/replication/v1/message_pb.rb
@@ -3,6 +3,10 @@
require 'google/protobuf'
+require 'google/protobuf/timestamp_pb'
+require 'dependencies/gogoproto/gogo_pb'
+require 'temporal/api/enums/v1/namespace_pb'
+
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("temporal/api/replication/v1/message.proto", :syntax => :proto3) do
add_message "temporal.api.replication.v1.ClusterReplicationConfig" do
@@ -11,16 +15,22 @@
add_message "temporal.api.replication.v1.NamespaceReplicationConfig" do
optional :active_cluster_name, :string, 1
repeated :clusters, :message, 2, "temporal.api.replication.v1.ClusterReplicationConfig"
+ optional :state, :enum, 3, "temporal.api.enums.v1.ReplicationState"
+ end
+ add_message "temporal.api.replication.v1.FailoverStatus" do
+ optional :failover_time, :message, 1, "google.protobuf.Timestamp"
+ optional :failover_version, :int64, 2
end
end
end
-module Temporal
+module Temporalio
module Api
module Replication
module V1
ClusterReplicationConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.replication.v1.ClusterReplicationConfig").msgclass
NamespaceReplicationConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.replication.v1.NamespaceReplicationConfig").msgclass
+ FailoverStatus = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.replication.v1.FailoverStatus").msgclass
end
end
end
diff --git a/lib/gen/temporal/api/schedule/v1/message_pb.rb b/lib/gen/temporal/api/schedule/v1/message_pb.rb
new file mode 100644
index 00000000..1d2f5383
--- /dev/null
+++ b/lib/gen/temporal/api/schedule/v1/message_pb.rb
@@ -0,0 +1,149 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: temporal/api/schedule/v1/message.proto
+
+require 'google/protobuf'
+
+require 'google/protobuf/duration_pb'
+require 'google/protobuf/timestamp_pb'
+require 'dependencies/gogoproto/gogo_pb'
+require 'temporal/api/common/v1/message_pb'
+require 'temporal/api/enums/v1/schedule_pb'
+require 'temporal/api/workflow/v1/message_pb'
+
+Google::Protobuf::DescriptorPool.generated_pool.build do
+ add_file("temporal/api/schedule/v1/message.proto", :syntax => :proto3) do
+ add_message "temporal.api.schedule.v1.CalendarSpec" do
+ optional :second, :string, 1
+ optional :minute, :string, 2
+ optional :hour, :string, 3
+ optional :day_of_month, :string, 4
+ optional :month, :string, 5
+ optional :year, :string, 6
+ optional :day_of_week, :string, 7
+ optional :comment, :string, 8
+ end
+ add_message "temporal.api.schedule.v1.Range" do
+ optional :start, :int32, 1
+ optional :end, :int32, 2
+ optional :step, :int32, 3
+ end
+ add_message "temporal.api.schedule.v1.StructuredCalendarSpec" do
+ repeated :second, :message, 1, "temporal.api.schedule.v1.Range"
+ repeated :minute, :message, 2, "temporal.api.schedule.v1.Range"
+ repeated :hour, :message, 3, "temporal.api.schedule.v1.Range"
+ repeated :day_of_month, :message, 4, "temporal.api.schedule.v1.Range"
+ repeated :month, :message, 5, "temporal.api.schedule.v1.Range"
+ repeated :year, :message, 6, "temporal.api.schedule.v1.Range"
+ repeated :day_of_week, :message, 7, "temporal.api.schedule.v1.Range"
+ optional :comment, :string, 8
+ end
+ add_message "temporal.api.schedule.v1.IntervalSpec" do
+ optional :interval, :message, 1, "google.protobuf.Duration"
+ optional :phase, :message, 2, "google.protobuf.Duration"
+ end
+ add_message "temporal.api.schedule.v1.ScheduleSpec" do
+ repeated :structured_calendar, :message, 7, "temporal.api.schedule.v1.StructuredCalendarSpec"
+ repeated :cron_string, :string, 8
+ repeated :calendar, :message, 1, "temporal.api.schedule.v1.CalendarSpec"
+ repeated :interval, :message, 2, "temporal.api.schedule.v1.IntervalSpec"
+ repeated :exclude_calendar, :message, 3, "temporal.api.schedule.v1.CalendarSpec"
+ repeated :exclude_structured_calendar, :message, 9, "temporal.api.schedule.v1.StructuredCalendarSpec"
+ optional :start_time, :message, 4, "google.protobuf.Timestamp"
+ optional :end_time, :message, 5, "google.protobuf.Timestamp"
+ optional :jitter, :message, 6, "google.protobuf.Duration"
+ optional :timezone_name, :string, 10
+ optional :timezone_data, :bytes, 11
+ end
+ add_message "temporal.api.schedule.v1.SchedulePolicies" do
+ optional :overlap_policy, :enum, 1, "temporal.api.enums.v1.ScheduleOverlapPolicy"
+ optional :catchup_window, :message, 2, "google.protobuf.Duration"
+ optional :pause_on_failure, :bool, 3
+ end
+ add_message "temporal.api.schedule.v1.ScheduleAction" do
+ oneof :action do
+ optional :start_workflow, :message, 1, "temporal.api.workflow.v1.NewWorkflowExecutionInfo"
+ end
+ end
+ add_message "temporal.api.schedule.v1.ScheduleActionResult" do
+ optional :schedule_time, :message, 1, "google.protobuf.Timestamp"
+ optional :actual_time, :message, 2, "google.protobuf.Timestamp"
+ optional :start_workflow_result, :message, 11, "temporal.api.common.v1.WorkflowExecution"
+ end
+ add_message "temporal.api.schedule.v1.ScheduleState" do
+ optional :notes, :string, 1
+ optional :paused, :bool, 2
+ optional :limited_actions, :bool, 3
+ optional :remaining_actions, :int64, 4
+ end
+ add_message "temporal.api.schedule.v1.TriggerImmediatelyRequest" do
+ optional :overlap_policy, :enum, 1, "temporal.api.enums.v1.ScheduleOverlapPolicy"
+ end
+ add_message "temporal.api.schedule.v1.BackfillRequest" do
+ optional :start_time, :message, 1, "google.protobuf.Timestamp"
+ optional :end_time, :message, 2, "google.protobuf.Timestamp"
+ optional :overlap_policy, :enum, 3, "temporal.api.enums.v1.ScheduleOverlapPolicy"
+ end
+ add_message "temporal.api.schedule.v1.SchedulePatch" do
+ optional :trigger_immediately, :message, 1, "temporal.api.schedule.v1.TriggerImmediatelyRequest"
+ repeated :backfill_request, :message, 2, "temporal.api.schedule.v1.BackfillRequest"
+ optional :pause, :string, 3
+ optional :unpause, :string, 4
+ end
+ add_message "temporal.api.schedule.v1.ScheduleInfo" do
+ optional :action_count, :int64, 1
+ optional :missed_catchup_window, :int64, 2
+ optional :overlap_skipped, :int64, 3
+ repeated :running_workflows, :message, 9, "temporal.api.common.v1.WorkflowExecution"
+ repeated :recent_actions, :message, 4, "temporal.api.schedule.v1.ScheduleActionResult"
+ repeated :future_action_times, :message, 5, "google.protobuf.Timestamp"
+ optional :create_time, :message, 6, "google.protobuf.Timestamp"
+ optional :update_time, :message, 7, "google.protobuf.Timestamp"
+ optional :invalid_schedule_error, :string, 8
+ end
+ add_message "temporal.api.schedule.v1.Schedule" do
+ optional :spec, :message, 1, "temporal.api.schedule.v1.ScheduleSpec"
+ optional :action, :message, 2, "temporal.api.schedule.v1.ScheduleAction"
+ optional :policies, :message, 3, "temporal.api.schedule.v1.SchedulePolicies"
+ optional :state, :message, 4, "temporal.api.schedule.v1.ScheduleState"
+ end
+ add_message "temporal.api.schedule.v1.ScheduleListInfo" do
+ optional :spec, :message, 1, "temporal.api.schedule.v1.ScheduleSpec"
+ optional :workflow_type, :message, 2, "temporal.api.common.v1.WorkflowType"
+ optional :notes, :string, 3
+ optional :paused, :bool, 4
+ repeated :recent_actions, :message, 5, "temporal.api.schedule.v1.ScheduleActionResult"
+ repeated :future_action_times, :message, 6, "google.protobuf.Timestamp"
+ end
+ add_message "temporal.api.schedule.v1.ScheduleListEntry" do
+ optional :schedule_id, :string, 1
+ optional :memo, :message, 2, "temporal.api.common.v1.Memo"
+ optional :search_attributes, :message, 3, "temporal.api.common.v1.SearchAttributes"
+ optional :info, :message, 4, "temporal.api.schedule.v1.ScheduleListInfo"
+ end
+ end
+end
+
+module Temporalio
+ module Api
+ module Schedule
+ module V1
+ CalendarSpec = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.CalendarSpec").msgclass
+ Range = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.Range").msgclass
+ StructuredCalendarSpec = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.StructuredCalendarSpec").msgclass
+ IntervalSpec = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.IntervalSpec").msgclass
+ ScheduleSpec = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.ScheduleSpec").msgclass
+ SchedulePolicies = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.SchedulePolicies").msgclass
+ ScheduleAction = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.ScheduleAction").msgclass
+ ScheduleActionResult = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.ScheduleActionResult").msgclass
+ ScheduleState = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.ScheduleState").msgclass
+ TriggerImmediatelyRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.TriggerImmediatelyRequest").msgclass
+ BackfillRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.BackfillRequest").msgclass
+ SchedulePatch = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.SchedulePatch").msgclass
+ ScheduleInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.ScheduleInfo").msgclass
+ Schedule = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.Schedule").msgclass
+ ScheduleListInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.ScheduleListInfo").msgclass
+ ScheduleListEntry = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.schedule.v1.ScheduleListEntry").msgclass
+ end
+ end
+ end
+end
diff --git a/lib/gen/temporal/api/sdk/v1/task_complete_metadata_pb.rb b/lib/gen/temporal/api/sdk/v1/task_complete_metadata_pb.rb
new file mode 100644
index 00000000..281bd518
--- /dev/null
+++ b/lib/gen/temporal/api/sdk/v1/task_complete_metadata_pb.rb
@@ -0,0 +1,23 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: temporal/api/sdk/v1/task_complete_metadata.proto
+
+require 'google/protobuf'
+
+Google::Protobuf::DescriptorPool.generated_pool.build do
+ add_file("temporal/api/sdk/v1/task_complete_metadata.proto", :syntax => :proto3) do
+ add_message "temporal.api.sdk.v1.WorkflowTaskCompletedMetadata" do
+ repeated :core_used_flags, :uint32, 1
+ repeated :lang_used_flags, :uint32, 2
+ end
+ end
+end
+
+module Temporalio
+ module Api
+ module Sdk
+ module V1
+ WorkflowTaskCompletedMetadata = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.sdk.v1.WorkflowTaskCompletedMetadata").msgclass
+ end
+ end
+ end
+end
diff --git a/lib/gen/temporal/api/taskqueue/v1/message_pb.rb b/lib/gen/temporal/api/taskqueue/v1/message_pb.rb
index 839cd994..053e5f67 100644
--- a/lib/gen/temporal/api/taskqueue/v1/message_pb.rb
+++ b/lib/gen/temporal/api/taskqueue/v1/message_pb.rb
@@ -6,7 +6,10 @@
require 'google/protobuf/duration_pb'
require 'google/protobuf/timestamp_pb'
require 'google/protobuf/wrappers_pb'
+require 'dependencies/gogoproto/gogo_pb'
require 'temporal/api/enums/v1/task_queue_pb'
+require 'temporal/api/common/v1/message_pb'
+
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("temporal/api/taskqueue/v1/message.proto", :syntax => :proto3) do
add_message "temporal.api.taskqueue.v1.TaskQueue" do
@@ -35,15 +38,20 @@
optional :last_access_time, :message, 1, "google.protobuf.Timestamp"
optional :identity, :string, 2
optional :rate_per_second, :double, 3
+ optional :worker_version_capabilities, :message, 4, "temporal.api.common.v1.WorkerVersionCapabilities"
end
add_message "temporal.api.taskqueue.v1.StickyExecutionAttributes" do
optional :worker_task_queue, :message, 1, "temporal.api.taskqueue.v1.TaskQueue"
optional :schedule_to_start_timeout, :message, 2, "google.protobuf.Duration"
end
+ add_message "temporal.api.taskqueue.v1.CompatibleVersionSet" do
+ optional :version_set_id, :string, 1
+ repeated :build_ids, :string, 2
+ end
end
end
-module Temporal
+module Temporalio
module Api
module TaskQueue
module V1
@@ -54,6 +62,7 @@ module V1
TaskQueuePartitionMetadata = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.taskqueue.v1.TaskQueuePartitionMetadata").msgclass
PollerInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.taskqueue.v1.PollerInfo").msgclass
StickyExecutionAttributes = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.taskqueue.v1.StickyExecutionAttributes").msgclass
+ CompatibleVersionSet = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.taskqueue.v1.CompatibleVersionSet").msgclass
end
end
end
diff --git a/lib/gen/temporal/api/update/v1/message_pb.rb b/lib/gen/temporal/api/update/v1/message_pb.rb
new file mode 100644
index 00000000..f438bc27
--- /dev/null
+++ b/lib/gen/temporal/api/update/v1/message_pb.rb
@@ -0,0 +1,72 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: temporal/api/update/v1/message.proto
+
+require 'google/protobuf'
+
+require 'temporal/api/common/v1/message_pb'
+require 'temporal/api/enums/v1/update_pb'
+require 'temporal/api/failure/v1/message_pb'
+
+Google::Protobuf::DescriptorPool.generated_pool.build do
+ add_file("temporal/api/update/v1/message.proto", :syntax => :proto3) do
+ add_message "temporal.api.update.v1.WaitPolicy" do
+ optional :lifecycle_stage, :enum, 1, "temporal.api.enums.v1.UpdateWorkflowExecutionLifecycleStage"
+ end
+ add_message "temporal.api.update.v1.UpdateRef" do
+ optional :workflow_execution, :message, 1, "temporal.api.common.v1.WorkflowExecution"
+ optional :update_id, :string, 2
+ end
+ add_message "temporal.api.update.v1.Outcome" do
+ oneof :value do
+ optional :success, :message, 1, "temporal.api.common.v1.Payloads"
+ optional :failure, :message, 2, "temporal.api.failure.v1.Failure"
+ end
+ end
+ add_message "temporal.api.update.v1.Meta" do
+ optional :update_id, :string, 1
+ optional :identity, :string, 2
+ end
+ add_message "temporal.api.update.v1.Input" do
+ optional :header, :message, 1, "temporal.api.common.v1.Header"
+ optional :name, :string, 2
+ optional :args, :message, 3, "temporal.api.common.v1.Payloads"
+ end
+ add_message "temporal.api.update.v1.Request" do
+ optional :meta, :message, 1, "temporal.api.update.v1.Meta"
+ optional :input, :message, 2, "temporal.api.update.v1.Input"
+ end
+ add_message "temporal.api.update.v1.Rejection" do
+ optional :rejected_request_message_id, :string, 1
+ optional :rejected_request_sequencing_event_id, :int64, 2
+ optional :rejected_request, :message, 3, "temporal.api.update.v1.Request"
+ optional :failure, :message, 4, "temporal.api.failure.v1.Failure"
+ end
+ add_message "temporal.api.update.v1.Acceptance" do
+ optional :accepted_request_message_id, :string, 1
+ optional :accepted_request_sequencing_event_id, :int64, 2
+ optional :accepted_request, :message, 3, "temporal.api.update.v1.Request"
+ end
+ add_message "temporal.api.update.v1.Response" do
+ optional :meta, :message, 1, "temporal.api.update.v1.Meta"
+ optional :outcome, :message, 2, "temporal.api.update.v1.Outcome"
+ end
+ end
+end
+
+module Temporalio
+ module Api
+ module Update
+ module V1
+ WaitPolicy = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.update.v1.WaitPolicy").msgclass
+ UpdateRef = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.update.v1.UpdateRef").msgclass
+ Outcome = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.update.v1.Outcome").msgclass
+ Meta = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.update.v1.Meta").msgclass
+ Input = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.update.v1.Input").msgclass
+ Request = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.update.v1.Request").msgclass
+ Rejection = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.update.v1.Rejection").msgclass
+ Acceptance = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.update.v1.Acceptance").msgclass
+ Response = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.update.v1.Response").msgclass
+ end
+ end
+ end
+end
diff --git a/lib/gen/temporal/api/version/v1/message_pb.rb b/lib/gen/temporal/api/version/v1/message_pb.rb
index f166d07d..02302766 100644
--- a/lib/gen/temporal/api/version/v1/message_pb.rb
+++ b/lib/gen/temporal/api/version/v1/message_pb.rb
@@ -4,7 +4,9 @@
require 'google/protobuf'
require 'google/protobuf/timestamp_pb'
+require 'dependencies/gogoproto/gogo_pb'
require 'temporal/api/enums/v1/common_pb'
+
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("temporal/api/version/v1/message.proto", :syntax => :proto3) do
add_message "temporal.api.version.v1.ReleaseInfo" do
@@ -26,7 +28,7 @@
end
end
-module Temporal
+module Temporalio
module Api
module Version
module V1
diff --git a/lib/gen/temporal/api/workflow/v1/message_pb.rb b/lib/gen/temporal/api/workflow/v1/message_pb.rb
index c02b89ca..470fd0a4 100644
--- a/lib/gen/temporal/api/workflow/v1/message_pb.rb
+++ b/lib/gen/temporal/api/workflow/v1/message_pb.rb
@@ -5,10 +5,12 @@
require 'google/protobuf/duration_pb'
require 'google/protobuf/timestamp_pb'
+require 'dependencies/gogoproto/gogo_pb'
require 'temporal/api/enums/v1/workflow_pb'
require 'temporal/api/common/v1/message_pb'
require 'temporal/api/failure/v1/message_pb'
require 'temporal/api/taskqueue/v1/message_pb'
+
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("temporal/api/workflow/v1/message.proto", :syntax => :proto3) do
add_message "temporal.api.workflow.v1.WorkflowExecutionInfo" do
@@ -25,6 +27,9 @@
optional :search_attributes, :message, 11, "temporal.api.common.v1.SearchAttributes"
optional :auto_reset_points, :message, 12, "temporal.api.workflow.v1.ResetPoints"
optional :task_queue, :string, 13
+ optional :state_transition_count, :int64, 14
+ optional :history_size_bytes, :int64, 15
+ optional :most_recent_worker_version_stamp, :message, 16, "temporal.api.common.v1.WorkerVersionStamp"
end
add_message "temporal.api.workflow.v1.WorkflowExecutionConfig" do
optional :task_queue, :message, 1, "temporal.api.taskqueue.v1.TaskQueue"
@@ -53,6 +58,13 @@
optional :initiated_id, :int64, 4
optional :parent_close_policy, :enum, 5, "temporal.api.enums.v1.ParentClosePolicy"
end
+ add_message "temporal.api.workflow.v1.PendingWorkflowTaskInfo" do
+ optional :state, :enum, 1, "temporal.api.enums.v1.PendingWorkflowTaskState"
+ optional :scheduled_time, :message, 2, "google.protobuf.Timestamp"
+ optional :original_scheduled_time, :message, 3, "google.protobuf.Timestamp"
+ optional :started_time, :message, 4, "google.protobuf.Timestamp"
+ optional :attempt, :int32, 5
+ end
add_message "temporal.api.workflow.v1.ResetPoints" do
repeated :points, :message, 1, "temporal.api.workflow.v1.ResetPointInfo"
end
@@ -64,10 +76,25 @@
optional :expire_time, :message, 5, "google.protobuf.Timestamp"
optional :resettable, :bool, 6
end
+ add_message "temporal.api.workflow.v1.NewWorkflowExecutionInfo" do
+ optional :workflow_id, :string, 1
+ optional :workflow_type, :message, 2, "temporal.api.common.v1.WorkflowType"
+ optional :task_queue, :message, 3, "temporal.api.taskqueue.v1.TaskQueue"
+ optional :input, :message, 4, "temporal.api.common.v1.Payloads"
+ optional :workflow_execution_timeout, :message, 5, "google.protobuf.Duration"
+ optional :workflow_run_timeout, :message, 6, "google.protobuf.Duration"
+ optional :workflow_task_timeout, :message, 7, "google.protobuf.Duration"
+ optional :workflow_id_reuse_policy, :enum, 8, "temporal.api.enums.v1.WorkflowIdReusePolicy"
+ optional :retry_policy, :message, 9, "temporal.api.common.v1.RetryPolicy"
+ optional :cron_schedule, :string, 10
+ optional :memo, :message, 11, "temporal.api.common.v1.Memo"
+ optional :search_attributes, :message, 12, "temporal.api.common.v1.SearchAttributes"
+ optional :header, :message, 13, "temporal.api.common.v1.Header"
+ end
end
end
-module Temporal
+module Temporalio
module Api
module Workflow
module V1
@@ -75,8 +102,10 @@ module V1
WorkflowExecutionConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflow.v1.WorkflowExecutionConfig").msgclass
PendingActivityInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflow.v1.PendingActivityInfo").msgclass
PendingChildExecutionInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflow.v1.PendingChildExecutionInfo").msgclass
+ PendingWorkflowTaskInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflow.v1.PendingWorkflowTaskInfo").msgclass
ResetPoints = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflow.v1.ResetPoints").msgclass
ResetPointInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflow.v1.ResetPointInfo").msgclass
+ NewWorkflowExecutionInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflow.v1.NewWorkflowExecutionInfo").msgclass
end
end
end
diff --git a/lib/gen/temporal/api/workflowservice/v1/request_response_pb.rb b/lib/gen/temporal/api/workflowservice/v1/request_response_pb.rb
index 57a2f735..ab356996 100644
--- a/lib/gen/temporal/api/workflowservice/v1/request_response_pb.rb
+++ b/lib/gen/temporal/api/workflowservice/v1/request_response_pb.rb
@@ -3,11 +3,13 @@
require 'google/protobuf'
+require 'temporal/api/enums/v1/batch_operation_pb'
require 'temporal/api/enums/v1/workflow_pb'
require 'temporal/api/enums/v1/namespace_pb'
require 'temporal/api/enums/v1/failed_cause_pb'
require 'temporal/api/enums/v1/common_pb'
require 'temporal/api/enums/v1/query_pb'
+require 'temporal/api/enums/v1/reset_pb'
require 'temporal/api/enums/v1/task_queue_pb'
require 'temporal/api/common/v1/message_pb'
require 'temporal/api/history/v1/message_pb'
@@ -15,13 +17,20 @@
require 'temporal/api/command/v1/message_pb'
require 'temporal/api/failure/v1/message_pb'
require 'temporal/api/filter/v1/message_pb'
+require 'temporal/api/protocol/v1/message_pb'
require 'temporal/api/namespace/v1/message_pb'
require 'temporal/api/query/v1/message_pb'
require 'temporal/api/replication/v1/message_pb'
+require 'temporal/api/schedule/v1/message_pb'
require 'temporal/api/taskqueue/v1/message_pb'
+require 'temporal/api/update/v1/message_pb'
require 'temporal/api/version/v1/message_pb'
+require 'temporal/api/batch/v1/message_pb'
+require 'temporal/api/sdk/v1/task_complete_metadata_pb'
require 'google/protobuf/duration_pb'
require 'google/protobuf/timestamp_pb'
+require 'dependencies/gogoproto/gogo_pb'
+
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("temporal/api/workflowservice/v1/request_response.proto", :syntax => :proto3) do
add_message "temporal.api.workflowservice.v1.RegisterNamespaceRequest" do
@@ -44,6 +53,7 @@
add_message "temporal.api.workflowservice.v1.ListNamespacesRequest" do
optional :page_size, :int32, 1
optional :next_page_token, :bytes, 2
+ optional :namespace_filter, :message, 3, "temporal.api.namespace.v1.NamespaceFilter"
end
add_message "temporal.api.workflowservice.v1.ListNamespacesResponse" do
repeated :namespaces, :message, 1, "temporal.api.workflowservice.v1.DescribeNamespaceResponse"
@@ -59,6 +69,7 @@
optional :replication_config, :message, 3, "temporal.api.replication.v1.NamespaceReplicationConfig"
optional :failover_version, :int64, 4
optional :is_global_namespace, :bool, 5
+ repeated :failover_history, :message, 6, "temporal.api.replication.v1.FailoverStatus"
end
add_message "temporal.api.workflowservice.v1.UpdateNamespaceRequest" do
optional :namespace, :string, 1
@@ -67,6 +78,7 @@
optional :replication_config, :message, 4, "temporal.api.replication.v1.NamespaceReplicationConfig"
optional :security_token, :string, 5
optional :delete_bad_binary, :string, 6
+ optional :promote_namespace, :bool, 7
end
add_message "temporal.api.workflowservice.v1.UpdateNamespaceResponse" do
optional :namespace_info, :message, 1, "temporal.api.namespace.v1.NamespaceInfo"
@@ -98,9 +110,14 @@
optional :memo, :message, 14, "temporal.api.common.v1.Memo"
optional :search_attributes, :message, 15, "temporal.api.common.v1.SearchAttributes"
optional :header, :message, 16, "temporal.api.common.v1.Header"
+ optional :request_eager_execution, :bool, 17
+ optional :continued_failure, :message, 18, "temporal.api.failure.v1.Failure"
+ optional :last_completion_result, :message, 19, "temporal.api.common.v1.Payloads"
+ optional :workflow_start_delay, :message, 20, "google.protobuf.Duration"
end
add_message "temporal.api.workflowservice.v1.StartWorkflowExecutionResponse" do
optional :run_id, :string, 1
+ optional :eager_workflow_task, :message, 2, "temporal.api.workflowservice.v1.PollWorkflowTaskQueueResponse"
end
add_message "temporal.api.workflowservice.v1.GetWorkflowExecutionHistoryRequest" do
optional :namespace, :string, 1
@@ -117,11 +134,22 @@
optional :next_page_token, :bytes, 3
optional :archived, :bool, 4
end
+ add_message "temporal.api.workflowservice.v1.GetWorkflowExecutionHistoryReverseRequest" do
+ optional :namespace, :string, 1
+ optional :execution, :message, 2, "temporal.api.common.v1.WorkflowExecution"
+ optional :maximum_page_size, :int32, 3
+ optional :next_page_token, :bytes, 4
+ end
+ add_message "temporal.api.workflowservice.v1.GetWorkflowExecutionHistoryReverseResponse" do
+ optional :history, :message, 1, "temporal.api.history.v1.History"
+ optional :next_page_token, :bytes, 3
+ end
add_message "temporal.api.workflowservice.v1.PollWorkflowTaskQueueRequest" do
optional :namespace, :string, 1
optional :task_queue, :message, 2, "temporal.api.taskqueue.v1.TaskQueue"
optional :identity, :string, 3
optional :binary_checksum, :string, 4
+ optional :worker_version_capabilities, :message, 5, "temporal.api.common.v1.WorkerVersionCapabilities"
end
add_message "temporal.api.workflowservice.v1.PollWorkflowTaskQueueResponse" do
optional :task_token, :bytes, 1
@@ -138,6 +166,7 @@
optional :scheduled_time, :message, 12, "google.protobuf.Timestamp"
optional :started_time, :message, 13, "google.protobuf.Timestamp"
map :queries, :string, :message, 14, "temporal.api.query.v1.WorkflowQuery"
+ repeated :messages, :message, 15, "temporal.api.protocol.v1.Message"
end
add_message "temporal.api.workflowservice.v1.RespondWorkflowTaskCompletedRequest" do
optional :task_token, :bytes, 1
@@ -149,9 +178,15 @@
optional :binary_checksum, :string, 7
map :query_results, :string, :message, 8, "temporal.api.query.v1.WorkflowQueryResult"
optional :namespace, :string, 9
+ optional :worker_version_stamp, :message, 10, "temporal.api.common.v1.WorkerVersionStamp"
+ repeated :messages, :message, 11, "temporal.api.protocol.v1.Message"
+ optional :sdk_metadata, :message, 12, "temporal.api.sdk.v1.WorkflowTaskCompletedMetadata"
+ optional :metering_metadata, :message, 13, "temporal.api.common.v1.MeteringMetadata"
end
add_message "temporal.api.workflowservice.v1.RespondWorkflowTaskCompletedResponse" do
optional :workflow_task, :message, 1, "temporal.api.workflowservice.v1.PollWorkflowTaskQueueResponse"
+ repeated :activity_tasks, :message, 2, "temporal.api.workflowservice.v1.PollActivityTaskQueueResponse"
+ optional :reset_history_event_id, :int64, 3
end
add_message "temporal.api.workflowservice.v1.RespondWorkflowTaskFailedRequest" do
optional :task_token, :bytes, 1
@@ -160,6 +195,7 @@
optional :identity, :string, 4
optional :binary_checksum, :string, 5
optional :namespace, :string, 6
+ repeated :messages, :message, 7, "temporal.api.protocol.v1.Message"
end
add_message "temporal.api.workflowservice.v1.RespondWorkflowTaskFailedResponse" do
end
@@ -168,6 +204,7 @@
optional :task_queue, :message, 2, "temporal.api.taskqueue.v1.TaskQueue"
optional :identity, :string, 3
optional :task_queue_metadata, :message, 4, "temporal.api.taskqueue.v1.TaskQueueMetadata"
+ optional :worker_version_capabilities, :message, 5, "temporal.api.common.v1.WorkerVersionCapabilities"
end
add_message "temporal.api.workflowservice.v1.PollActivityTaskQueueResponse" do
optional :task_token, :bytes, 1
@@ -231,8 +268,10 @@
optional :failure, :message, 2, "temporal.api.failure.v1.Failure"
optional :identity, :string, 3
optional :namespace, :string, 4
+ optional :last_heartbeat_details, :message, 5, "temporal.api.common.v1.Payloads"
end
add_message "temporal.api.workflowservice.v1.RespondActivityTaskFailedResponse" do
+ repeated :failures, :message, 1, "temporal.api.failure.v1.Failure"
end
add_message "temporal.api.workflowservice.v1.RespondActivityTaskFailedByIdRequest" do
optional :namespace, :string, 1
@@ -241,8 +280,10 @@
optional :activity_id, :string, 4
optional :failure, :message, 5, "temporal.api.failure.v1.Failure"
optional :identity, :string, 6
+ optional :last_heartbeat_details, :message, 7, "temporal.api.common.v1.Payloads"
end
add_message "temporal.api.workflowservice.v1.RespondActivityTaskFailedByIdResponse" do
+ repeated :failures, :message, 1, "temporal.api.failure.v1.Failure"
end
add_message "temporal.api.workflowservice.v1.RespondActivityTaskCanceledRequest" do
optional :task_token, :bytes, 1
@@ -268,6 +309,7 @@
optional :identity, :string, 3
optional :request_id, :string, 4
optional :first_execution_run_id, :string, 5
+ optional :reason, :string, 6
end
add_message "temporal.api.workflowservice.v1.RequestCancelWorkflowExecutionResponse" do
end
@@ -279,6 +321,8 @@
optional :identity, :string, 5
optional :request_id, :string, 6
optional :control, :string, 7
+ optional :header, :message, 8, "temporal.api.common.v1.Header"
+ optional :skip_generate_workflow_task, :bool, 9
end
add_message "temporal.api.workflowservice.v1.SignalWorkflowExecutionResponse" do
end
@@ -302,6 +346,8 @@
optional :memo, :message, 17, "temporal.api.common.v1.Memo"
optional :search_attributes, :message, 18, "temporal.api.common.v1.SearchAttributes"
optional :header, :message, 19, "temporal.api.common.v1.Header"
+ optional :workflow_start_delay, :message, 20, "google.protobuf.Duration"
+ optional :skip_generate_workflow_task, :bool, 21
end
add_message "temporal.api.workflowservice.v1.SignalWithStartWorkflowExecutionResponse" do
optional :run_id, :string, 1
@@ -312,6 +358,7 @@
optional :reason, :string, 3
optional :workflow_task_finish_event_id, :int64, 4
optional :request_id, :string, 5
+ optional :reset_reapply_type, :enum, 6, "temporal.api.enums.v1.ResetReapplyType"
end
add_message "temporal.api.workflowservice.v1.ResetWorkflowExecutionResponse" do
optional :run_id, :string, 1
@@ -326,6 +373,12 @@
end
add_message "temporal.api.workflowservice.v1.TerminateWorkflowExecutionResponse" do
end
+ add_message "temporal.api.workflowservice.v1.DeleteWorkflowExecutionRequest" do
+ optional :namespace, :string, 1
+ optional :workflow_execution, :message, 2, "temporal.api.common.v1.WorkflowExecution"
+ end
+ add_message "temporal.api.workflowservice.v1.DeleteWorkflowExecutionResponse" do
+ end
add_message "temporal.api.workflowservice.v1.ListOpenWorkflowExecutionsRequest" do
optional :namespace, :string, 1
optional :maximum_page_size, :int32, 2
@@ -431,6 +484,7 @@
optional :workflow_execution_info, :message, 2, "temporal.api.workflow.v1.WorkflowExecutionInfo"
repeated :pending_activities, :message, 3, "temporal.api.workflow.v1.PendingActivityInfo"
repeated :pending_children, :message, 4, "temporal.api.workflow.v1.PendingChildExecutionInfo"
+ optional :pending_workflow_task, :message, 5, "temporal.api.workflow.v1.PendingWorkflowTaskInfo"
end
add_message "temporal.api.workflowservice.v1.DescribeTaskQueueRequest" do
optional :namespace, :string, 1
@@ -451,6 +505,25 @@
optional :version_info, :message, 4, "temporal.api.version.v1.VersionInfo"
optional :cluster_name, :string, 5
optional :history_shard_count, :int32, 6
+ optional :persistence_store, :string, 7
+ optional :visibility_store, :string, 8
+ end
+ add_message "temporal.api.workflowservice.v1.GetSystemInfoRequest" do
+ end
+ add_message "temporal.api.workflowservice.v1.GetSystemInfoResponse" do
+ optional :server_version, :string, 1
+ optional :capabilities, :message, 2, "temporal.api.workflowservice.v1.GetSystemInfoResponse.Capabilities"
+ end
+ add_message "temporal.api.workflowservice.v1.GetSystemInfoResponse.Capabilities" do
+ optional :signal_and_query_header, :bool, 1
+ optional :internal_error_differentiation, :bool, 2
+ optional :activity_failure_include_heartbeat, :bool, 3
+ optional :supports_schedules, :bool, 4
+ optional :encoded_failure_attributes, :bool, 5
+ optional :build_id_based_versioning, :bool, 6
+ optional :upsert_memo, :bool, 7
+ optional :eager_workflow_start, :bool, 8
+ optional :sdk_metadata, :bool, 9
end
add_message "temporal.api.workflowservice.v1.ListTaskQueuePartitionsRequest" do
optional :namespace, :string, 1
@@ -460,10 +533,185 @@
repeated :activity_task_queue_partitions, :message, 1, "temporal.api.taskqueue.v1.TaskQueuePartitionMetadata"
repeated :workflow_task_queue_partitions, :message, 2, "temporal.api.taskqueue.v1.TaskQueuePartitionMetadata"
end
+ add_message "temporal.api.workflowservice.v1.CreateScheduleRequest" do
+ optional :namespace, :string, 1
+ optional :schedule_id, :string, 2
+ optional :schedule, :message, 3, "temporal.api.schedule.v1.Schedule"
+ optional :initial_patch, :message, 4, "temporal.api.schedule.v1.SchedulePatch"
+ optional :identity, :string, 5
+ optional :request_id, :string, 6
+ optional :memo, :message, 7, "temporal.api.common.v1.Memo"
+ optional :search_attributes, :message, 8, "temporal.api.common.v1.SearchAttributes"
+ end
+ add_message "temporal.api.workflowservice.v1.CreateScheduleResponse" do
+ optional :conflict_token, :bytes, 1
+ end
+ add_message "temporal.api.workflowservice.v1.DescribeScheduleRequest" do
+ optional :namespace, :string, 1
+ optional :schedule_id, :string, 2
+ end
+ add_message "temporal.api.workflowservice.v1.DescribeScheduleResponse" do
+ optional :schedule, :message, 1, "temporal.api.schedule.v1.Schedule"
+ optional :info, :message, 2, "temporal.api.schedule.v1.ScheduleInfo"
+ optional :memo, :message, 3, "temporal.api.common.v1.Memo"
+ optional :search_attributes, :message, 4, "temporal.api.common.v1.SearchAttributes"
+ optional :conflict_token, :bytes, 5
+ end
+ add_message "temporal.api.workflowservice.v1.UpdateScheduleRequest" do
+ optional :namespace, :string, 1
+ optional :schedule_id, :string, 2
+ optional :schedule, :message, 3, "temporal.api.schedule.v1.Schedule"
+ optional :conflict_token, :bytes, 4
+ optional :identity, :string, 5
+ optional :request_id, :string, 6
+ end
+ add_message "temporal.api.workflowservice.v1.UpdateScheduleResponse" do
+ end
+ add_message "temporal.api.workflowservice.v1.PatchScheduleRequest" do
+ optional :namespace, :string, 1
+ optional :schedule_id, :string, 2
+ optional :patch, :message, 3, "temporal.api.schedule.v1.SchedulePatch"
+ optional :identity, :string, 4
+ optional :request_id, :string, 5
+ end
+ add_message "temporal.api.workflowservice.v1.PatchScheduleResponse" do
+ end
+ add_message "temporal.api.workflowservice.v1.ListScheduleMatchingTimesRequest" do
+ optional :namespace, :string, 1
+ optional :schedule_id, :string, 2
+ optional :start_time, :message, 3, "google.protobuf.Timestamp"
+ optional :end_time, :message, 4, "google.protobuf.Timestamp"
+ end
+ add_message "temporal.api.workflowservice.v1.ListScheduleMatchingTimesResponse" do
+ repeated :start_time, :message, 1, "google.protobuf.Timestamp"
+ end
+ add_message "temporal.api.workflowservice.v1.DeleteScheduleRequest" do
+ optional :namespace, :string, 1
+ optional :schedule_id, :string, 2
+ optional :identity, :string, 3
+ end
+ add_message "temporal.api.workflowservice.v1.DeleteScheduleResponse" do
+ end
+ add_message "temporal.api.workflowservice.v1.ListSchedulesRequest" do
+ optional :namespace, :string, 1
+ optional :maximum_page_size, :int32, 2
+ optional :next_page_token, :bytes, 3
+ end
+ add_message "temporal.api.workflowservice.v1.ListSchedulesResponse" do
+ repeated :schedules, :message, 1, "temporal.api.schedule.v1.ScheduleListEntry"
+ optional :next_page_token, :bytes, 2
+ end
+ add_message "temporal.api.workflowservice.v1.UpdateWorkerBuildIdCompatibilityRequest" do
+ optional :namespace, :string, 1
+ optional :task_queue, :string, 2
+ oneof :operation do
+ optional :add_new_build_id_in_new_default_set, :string, 3
+ optional :add_new_compatible_build_id, :message, 4, "temporal.api.workflowservice.v1.UpdateWorkerBuildIdCompatibilityRequest.AddNewCompatibleVersion"
+ optional :promote_set_by_build_id, :string, 5
+ optional :promote_build_id_within_set, :string, 6
+ end
+ end
+ add_message "temporal.api.workflowservice.v1.UpdateWorkerBuildIdCompatibilityRequest.AddNewCompatibleVersion" do
+ optional :new_build_id, :string, 1
+ optional :existing_compatible_build_id, :string, 2
+ optional :make_set_default, :bool, 3
+ end
+ add_message "temporal.api.workflowservice.v1.UpdateWorkerBuildIdCompatibilityResponse" do
+ optional :version_set_id, :string, 1
+ end
+ add_message "temporal.api.workflowservice.v1.GetWorkerBuildIdCompatibilityRequest" do
+ optional :namespace, :string, 1
+ optional :task_queue, :string, 2
+ optional :max_sets, :int32, 3
+ optional :include_retirement_candidates, :bool, 4
+ optional :include_poller_compatibility, :bool, 5
+ end
+ add_message "temporal.api.workflowservice.v1.GetWorkerBuildIdCompatibilityResponse" do
+ repeated :major_version_sets, :message, 1, "temporal.api.taskqueue.v1.CompatibleVersionSet"
+ repeated :retirement_candidates, :message, 2, "temporal.api.workflowservice.v1.GetWorkerBuildIdCompatibilityResponse.RetirementCandidate"
+ repeated :active_versions_and_pollers, :message, 3, "temporal.api.workflowservice.v1.GetWorkerBuildIdCompatibilityResponse.VersionsWithCompatiblePollers"
+ end
+ add_message "temporal.api.workflowservice.v1.GetWorkerBuildIdCompatibilityResponse.RetirementCandidate" do
+ optional :build_id, :string, 1
+ optional :all_workflows_are_archived, :bool, 2
+ repeated :pollers, :message, 3, "temporal.api.taskqueue.v1.PollerInfo"
+ end
+ add_message "temporal.api.workflowservice.v1.GetWorkerBuildIdCompatibilityResponse.VersionsWithCompatiblePollers" do
+ optional :most_recent_build_id, :string, 1
+ repeated :pollers, :message, 2, "temporal.api.taskqueue.v1.PollerInfo"
+ end
+ add_message "temporal.api.workflowservice.v1.UpdateWorkflowExecutionRequest" do
+ optional :namespace, :string, 1
+ optional :workflow_execution, :message, 2, "temporal.api.common.v1.WorkflowExecution"
+ optional :first_execution_run_id, :string, 3
+ optional :wait_policy, :message, 4, "temporal.api.update.v1.WaitPolicy"
+ optional :request, :message, 5, "temporal.api.update.v1.Request"
+ end
+ add_message "temporal.api.workflowservice.v1.UpdateWorkflowExecutionResponse" do
+ optional :update_ref, :message, 1, "temporal.api.update.v1.UpdateRef"
+ optional :outcome, :message, 2, "temporal.api.update.v1.Outcome"
+ end
+ add_message "temporal.api.workflowservice.v1.StartBatchOperationRequest" do
+ optional :namespace, :string, 1
+ optional :visibility_query, :string, 2
+ optional :job_id, :string, 3
+ optional :reason, :string, 4
+ repeated :executions, :message, 5, "temporal.api.common.v1.WorkflowExecution"
+ oneof :operation do
+ optional :termination_operation, :message, 10, "temporal.api.batch.v1.BatchOperationTermination"
+ optional :signal_operation, :message, 11, "temporal.api.batch.v1.BatchOperationSignal"
+ optional :cancellation_operation, :message, 12, "temporal.api.batch.v1.BatchOperationCancellation"
+ optional :deletion_operation, :message, 13, "temporal.api.batch.v1.BatchOperationDeletion"
+ end
+ end
+ add_message "temporal.api.workflowservice.v1.StartBatchOperationResponse" do
+ end
+ add_message "temporal.api.workflowservice.v1.StopBatchOperationRequest" do
+ optional :namespace, :string, 1
+ optional :job_id, :string, 2
+ optional :reason, :string, 3
+ optional :identity, :string, 4
+ end
+ add_message "temporal.api.workflowservice.v1.StopBatchOperationResponse" do
+ end
+ add_message "temporal.api.workflowservice.v1.DescribeBatchOperationRequest" do
+ optional :namespace, :string, 1
+ optional :job_id, :string, 2
+ end
+ add_message "temporal.api.workflowservice.v1.DescribeBatchOperationResponse" do
+ optional :operation_type, :enum, 1, "temporal.api.enums.v1.BatchOperationType"
+ optional :job_id, :string, 2
+ optional :state, :enum, 3, "temporal.api.enums.v1.BatchOperationState"
+ optional :start_time, :message, 4, "google.protobuf.Timestamp"
+ optional :close_time, :message, 5, "google.protobuf.Timestamp"
+ optional :total_operation_count, :int64, 6
+ optional :complete_operation_count, :int64, 7
+ optional :failure_operation_count, :int64, 8
+ optional :identity, :string, 9
+ optional :reason, :string, 10
+ end
+ add_message "temporal.api.workflowservice.v1.ListBatchOperationsRequest" do
+ optional :namespace, :string, 1
+ optional :page_size, :int32, 2
+ optional :next_page_token, :bytes, 3
+ end
+ add_message "temporal.api.workflowservice.v1.ListBatchOperationsResponse" do
+ repeated :operation_info, :message, 1, "temporal.api.batch.v1.BatchOperationInfo"
+ optional :next_page_token, :bytes, 2
+ end
+ add_message "temporal.api.workflowservice.v1.PollWorkflowExecutionUpdateRequest" do
+ optional :namespace, :string, 1
+ optional :update_ref, :message, 2, "temporal.api.update.v1.UpdateRef"
+ optional :identity, :string, 3
+ optional :wait_policy, :message, 4, "temporal.api.update.v1.WaitPolicy"
+ end
+ add_message "temporal.api.workflowservice.v1.PollWorkflowExecutionUpdateResponse" do
+ optional :outcome, :message, 1, "temporal.api.update.v1.Outcome"
+ end
end
end
-module Temporal
+module Temporalio
module Api
module WorkflowService
module V1
@@ -481,6 +729,8 @@ module V1
StartWorkflowExecutionResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.StartWorkflowExecutionResponse").msgclass
GetWorkflowExecutionHistoryRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.GetWorkflowExecutionHistoryRequest").msgclass
GetWorkflowExecutionHistoryResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.GetWorkflowExecutionHistoryResponse").msgclass
+ GetWorkflowExecutionHistoryReverseRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.GetWorkflowExecutionHistoryReverseRequest").msgclass
+ GetWorkflowExecutionHistoryReverseResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.GetWorkflowExecutionHistoryReverseResponse").msgclass
PollWorkflowTaskQueueRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.PollWorkflowTaskQueueRequest").msgclass
PollWorkflowTaskQueueResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.PollWorkflowTaskQueueResponse").msgclass
RespondWorkflowTaskCompletedRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.RespondWorkflowTaskCompletedRequest").msgclass
@@ -515,6 +765,8 @@ module V1
ResetWorkflowExecutionResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.ResetWorkflowExecutionResponse").msgclass
TerminateWorkflowExecutionRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.TerminateWorkflowExecutionRequest").msgclass
TerminateWorkflowExecutionResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.TerminateWorkflowExecutionResponse").msgclass
+ DeleteWorkflowExecutionRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.DeleteWorkflowExecutionRequest").msgclass
+ DeleteWorkflowExecutionResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.DeleteWorkflowExecutionResponse").msgclass
ListOpenWorkflowExecutionsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.ListOpenWorkflowExecutionsRequest").msgclass
ListOpenWorkflowExecutionsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.ListOpenWorkflowExecutionsResponse").msgclass
ListClosedWorkflowExecutionsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.ListClosedWorkflowExecutionsRequest").msgclass
@@ -541,8 +793,44 @@ module V1
DescribeTaskQueueResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.DescribeTaskQueueResponse").msgclass
GetClusterInfoRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.GetClusterInfoRequest").msgclass
GetClusterInfoResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.GetClusterInfoResponse").msgclass
+ GetSystemInfoRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.GetSystemInfoRequest").msgclass
+ GetSystemInfoResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.GetSystemInfoResponse").msgclass
+ GetSystemInfoResponse::Capabilities = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.GetSystemInfoResponse.Capabilities").msgclass
ListTaskQueuePartitionsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.ListTaskQueuePartitionsRequest").msgclass
ListTaskQueuePartitionsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.ListTaskQueuePartitionsResponse").msgclass
+ CreateScheduleRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.CreateScheduleRequest").msgclass
+ CreateScheduleResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.CreateScheduleResponse").msgclass
+ DescribeScheduleRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.DescribeScheduleRequest").msgclass
+ DescribeScheduleResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.DescribeScheduleResponse").msgclass
+ UpdateScheduleRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.UpdateScheduleRequest").msgclass
+ UpdateScheduleResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.UpdateScheduleResponse").msgclass
+ PatchScheduleRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.PatchScheduleRequest").msgclass
+ PatchScheduleResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.PatchScheduleResponse").msgclass
+ ListScheduleMatchingTimesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.ListScheduleMatchingTimesRequest").msgclass
+ ListScheduleMatchingTimesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.ListScheduleMatchingTimesResponse").msgclass
+ DeleteScheduleRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.DeleteScheduleRequest").msgclass
+ DeleteScheduleResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.DeleteScheduleResponse").msgclass
+ ListSchedulesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.ListSchedulesRequest").msgclass
+ ListSchedulesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.ListSchedulesResponse").msgclass
+ UpdateWorkerBuildIdCompatibilityRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.UpdateWorkerBuildIdCompatibilityRequest").msgclass
+ UpdateWorkerBuildIdCompatibilityRequest::AddNewCompatibleVersion = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.UpdateWorkerBuildIdCompatibilityRequest.AddNewCompatibleVersion").msgclass
+ UpdateWorkerBuildIdCompatibilityResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.UpdateWorkerBuildIdCompatibilityResponse").msgclass
+ GetWorkerBuildIdCompatibilityRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.GetWorkerBuildIdCompatibilityRequest").msgclass
+ GetWorkerBuildIdCompatibilityResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.GetWorkerBuildIdCompatibilityResponse").msgclass
+ GetWorkerBuildIdCompatibilityResponse::RetirementCandidate = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.GetWorkerBuildIdCompatibilityResponse.RetirementCandidate").msgclass
+ GetWorkerBuildIdCompatibilityResponse::VersionsWithCompatiblePollers = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.GetWorkerBuildIdCompatibilityResponse.VersionsWithCompatiblePollers").msgclass
+ UpdateWorkflowExecutionRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.UpdateWorkflowExecutionRequest").msgclass
+ UpdateWorkflowExecutionResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.UpdateWorkflowExecutionResponse").msgclass
+ StartBatchOperationRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.StartBatchOperationRequest").msgclass
+ StartBatchOperationResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.StartBatchOperationResponse").msgclass
+ StopBatchOperationRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.StopBatchOperationRequest").msgclass
+ StopBatchOperationResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.StopBatchOperationResponse").msgclass
+ DescribeBatchOperationRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.DescribeBatchOperationRequest").msgclass
+ DescribeBatchOperationResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.DescribeBatchOperationResponse").msgclass
+ ListBatchOperationsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.ListBatchOperationsRequest").msgclass
+ ListBatchOperationsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.ListBatchOperationsResponse").msgclass
+ PollWorkflowExecutionUpdateRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.PollWorkflowExecutionUpdateRequest").msgclass
+ PollWorkflowExecutionUpdateResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("temporal.api.workflowservice.v1.PollWorkflowExecutionUpdateResponse").msgclass
end
end
end
diff --git a/lib/gen/temporal/api/workflowservice/v1/service_pb.rb b/lib/gen/temporal/api/workflowservice/v1/service_pb.rb
index a5e87578..667b8800 100644
--- a/lib/gen/temporal/api/workflowservice/v1/service_pb.rb
+++ b/lib/gen/temporal/api/workflowservice/v1/service_pb.rb
@@ -4,12 +4,13 @@
require 'google/protobuf'
require 'temporal/api/workflowservice/v1/request_response_pb'
+
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("temporal/api/workflowservice/v1/service.proto", :syntax => :proto3) do
end
end
-module Temporal
+module Temporalio
module Api
module WorkflowService
module V1
diff --git a/lib/gen/temporal/api/workflowservice/v1/service_services_pb.rb b/lib/gen/temporal/api/workflowservice/v1/service_services_pb.rb
index 8a3b1dab..3acb9d26 100644
--- a/lib/gen/temporal/api/workflowservice/v1/service_services_pb.rb
+++ b/lib/gen/temporal/api/workflowservice/v1/service_services_pb.rb
@@ -1,5 +1,5 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
-# Source: temporal/api/workflowservice/v1/service.proto for package 'Temporal.Api.WorkflowService.V1'
+# Source: temporal/api/workflowservice/v1/service.proto for package 'Temporalio.Api.WorkflowService.V1'
# Original file comments:
# The MIT License
#
@@ -27,189 +27,298 @@
require 'grpc'
require 'temporal/api/workflowservice/v1/service_pb'
-module Temporal
+module Temporalio
module Api
module WorkflowService
module V1
module WorkflowService
- # WorkflowService API is exposed to provide support for long running applications. Application is expected to call
- # StartWorkflowExecution to create an instance for each instance of long running workflow. Such applications are expected
- # to have a worker which regularly polls for WorkflowTask and ActivityTask from the WorkflowService. For each
- # WorkflowTask, application is expected to process the history of events for that session and respond back with next
- # commands. For each ActivityTask, application is expected to execute the actual logic for that task and respond back
- # with completion or failure. Worker is expected to regularly heartbeat while activity task is running.
+ # WorkflowService API defines how Temporal SDKs and other clients interact with the Temporal server
+ # to create and interact with workflows and activities.
+ #
+ # Users are expected to call `StartWorkflowExecution` to create a new workflow execution.
+ #
+ # To drive workflows, a worker using a Temporal SDK must exist which regularly polls for workflow
+ # and activity tasks from the service. For each workflow task, the sdk must process the
+ # (incremental or complete) event history and respond back with any newly generated commands.
+ #
+ # For each activity task, the worker is expected to execute the user's code which implements that
+ # activity, responding with completion or failure.
class Service
- include GRPC::GenericService
+ include ::GRPC::GenericService
self.marshal_class_method = :encode
self.unmarshal_class_method = :decode
self.service_name = 'temporal.api.workflowservice.v1.WorkflowService'
- # RegisterNamespace creates a new namespace which can be used as a container for all resources. Namespace is a top level
- # entity within Temporal, used as a container for all resources like workflow executions, task queues, etc. Namespace
- # acts as a sandbox and provides isolation for all resources within the namespace. All resources belongs to exactly one
+ # RegisterNamespace creates a new namespace which can be used as a container for all resources.
+ #
+ # A Namespace is a top level entity within Temporal, and is used as a container for resources
+ # like workflow executions, task queues, etc. A Namespace acts as a sandbox and provides
+ # isolation for all resources within the namespace. All resources belongs to exactly one
# namespace.
- rpc :RegisterNamespace, ::Temporal::Api::WorkflowService::V1::RegisterNamespaceRequest, ::Temporal::Api::WorkflowService::V1::RegisterNamespaceResponse
+ rpc :RegisterNamespace, ::Temporalio::Api::WorkflowService::V1::RegisterNamespaceRequest, ::Temporalio::Api::WorkflowService::V1::RegisterNamespaceResponse
# DescribeNamespace returns the information and configuration for a registered namespace.
- rpc :DescribeNamespace, ::Temporal::Api::WorkflowService::V1::DescribeNamespaceRequest, ::Temporal::Api::WorkflowService::V1::DescribeNamespaceResponse
+ rpc :DescribeNamespace, ::Temporalio::Api::WorkflowService::V1::DescribeNamespaceRequest, ::Temporalio::Api::WorkflowService::V1::DescribeNamespaceResponse
# ListNamespaces returns the information and configuration for all namespaces.
- rpc :ListNamespaces, ::Temporal::Api::WorkflowService::V1::ListNamespacesRequest, ::Temporal::Api::WorkflowService::V1::ListNamespacesResponse
+ rpc :ListNamespaces, ::Temporalio::Api::WorkflowService::V1::ListNamespacesRequest, ::Temporalio::Api::WorkflowService::V1::ListNamespacesResponse
+ # UpdateNamespace is used to update the information and configuration of a registered
+ # namespace.
+ #
# (-- api-linter: core::0134::method-signature=disabled
# aip.dev/not-precedent: UpdateNamespace RPC doesn't follow Google API format. --)
# (-- api-linter: core::0134::response-message-name=disabled
# aip.dev/not-precedent: UpdateNamespace RPC doesn't follow Google API format. --)
- # UpdateNamespace is used to update the information and configuration for a registered namespace.
- rpc :UpdateNamespace, ::Temporal::Api::WorkflowService::V1::UpdateNamespaceRequest, ::Temporal::Api::WorkflowService::V1::UpdateNamespaceResponse
- # DeprecateNamespace is used to update state of a registered namespace to DEPRECATED. Once the namespace is deprecated
- # it cannot be used to start new workflow executions. Existing workflow executions will continue to run on
- # deprecated namespaces.
- rpc :DeprecateNamespace, ::Temporal::Api::WorkflowService::V1::DeprecateNamespaceRequest, ::Temporal::Api::WorkflowService::V1::DeprecateNamespaceResponse
- # StartWorkflowExecution starts a new long running workflow instance. It will create the instance with
- # 'WorkflowExecutionStarted' event in history and also schedule the first WorkflowTask for the worker to make the
- # first command for this instance. It will return 'WorkflowExecutionAlreadyStartedFailure', if an instance already
- # exists with same workflowId.
- rpc :StartWorkflowExecution, ::Temporal::Api::WorkflowService::V1::StartWorkflowExecutionRequest, ::Temporal::Api::WorkflowService::V1::StartWorkflowExecutionResponse
- # GetWorkflowExecutionHistory returns the history of specified workflow execution. It fails with 'NotFoundFailure' if specified workflow
- # execution in unknown to the service.
- rpc :GetWorkflowExecutionHistory, ::Temporal::Api::WorkflowService::V1::GetWorkflowExecutionHistoryRequest, ::Temporal::Api::WorkflowService::V1::GetWorkflowExecutionHistoryResponse
- # PollWorkflowTaskQueue is called by application worker to process WorkflowTask from a specific task queue. A
- # WorkflowTask is dispatched to callers for active workflow executions, with pending workflow tasks.
- # Application is then expected to call 'RespondWorkflowTaskCompleted' API when it is done processing the WorkflowTask.
- # It will also create a 'WorkflowTaskStarted' event in the history for that session before handing off WorkflowTask to
- # application worker.
- rpc :PollWorkflowTaskQueue, ::Temporal::Api::WorkflowService::V1::PollWorkflowTaskQueueRequest, ::Temporal::Api::WorkflowService::V1::PollWorkflowTaskQueueResponse
- # RespondWorkflowTaskCompleted is called by application worker to complete a WorkflowTask handed as a result of
- # 'PollWorkflowTaskQueue' API call. Completing a WorkflowTask will result in new events for the workflow execution and
- # potentially new ActivityTask being created for corresponding commands. It will also create a WorkflowTaskCompleted
- # event in the history for that session. Use the 'taskToken' provided as response of PollWorkflowTaskQueue API call
- # for completing the WorkflowTask.
- # The response could contain a new workflow task if there is one or if the request asking for one.
- rpc :RespondWorkflowTaskCompleted, ::Temporal::Api::WorkflowService::V1::RespondWorkflowTaskCompletedRequest, ::Temporal::Api::WorkflowService::V1::RespondWorkflowTaskCompletedResponse
- # RespondWorkflowTaskFailed is called by application worker to indicate failure. This results in
- # WorkflowTaskFailedEvent written to the history and a new WorkflowTask created. This API can be used by client to
- # either clear sticky task queue or report any panics during WorkflowTask processing. Temporal will only append first
- # WorkflowTaskFailed event to the history of workflow execution for consecutive failures.
- rpc :RespondWorkflowTaskFailed, ::Temporal::Api::WorkflowService::V1::RespondWorkflowTaskFailedRequest, ::Temporal::Api::WorkflowService::V1::RespondWorkflowTaskFailedResponse
- # PollActivityTaskQueue is called by application worker to process ActivityTask from a specific task queue. ActivityTask
- # is dispatched to callers whenever a ScheduleTask command is made for a workflow execution.
- # Application is expected to call 'RespondActivityTaskCompleted' or 'RespondActivityTaskFailed' once it is done
+ rpc :UpdateNamespace, ::Temporalio::Api::WorkflowService::V1::UpdateNamespaceRequest, ::Temporalio::Api::WorkflowService::V1::UpdateNamespaceResponse
+ # DeprecateNamespace is used to update the state of a registered namespace to DEPRECATED.
+ #
+ # Once the namespace is deprecated it cannot be used to start new workflow executions. Existing
+ # workflow executions will continue to run on deprecated namespaces.
+ # Deprecated.
+ rpc :DeprecateNamespace, ::Temporalio::Api::WorkflowService::V1::DeprecateNamespaceRequest, ::Temporalio::Api::WorkflowService::V1::DeprecateNamespaceResponse
+ # StartWorkflowExecution starts a new workflow execution.
+ #
+ # It will create the execution with a `WORKFLOW_EXECUTION_STARTED` event in its history and
+ # also schedule the first workflow task. Returns `WorkflowExecutionAlreadyStarted`, if an
+ # instance already exists with same workflow id.
+ rpc :StartWorkflowExecution, ::Temporalio::Api::WorkflowService::V1::StartWorkflowExecutionRequest, ::Temporalio::Api::WorkflowService::V1::StartWorkflowExecutionResponse
+ # GetWorkflowExecutionHistory returns the history of specified workflow execution. Fails with
+ # `NotFound` if the specified workflow execution is unknown to the service.
+ rpc :GetWorkflowExecutionHistory, ::Temporalio::Api::WorkflowService::V1::GetWorkflowExecutionHistoryRequest, ::Temporalio::Api::WorkflowService::V1::GetWorkflowExecutionHistoryResponse
+ # GetWorkflowExecutionHistoryReverse returns the history of specified workflow execution in reverse
+ # order (starting from last event). Fails with `NotFound` if the specified workflow execution is
+ # unknown to the service.
+ rpc :GetWorkflowExecutionHistoryReverse, ::Temporalio::Api::WorkflowService::V1::GetWorkflowExecutionHistoryReverseRequest, ::Temporalio::Api::WorkflowService::V1::GetWorkflowExecutionHistoryReverseResponse
+ # PollWorkflowTaskQueue is called by workers to make progress on workflows.
+ #
+ # A WorkflowTask is dispatched to callers for active workflow executions with pending workflow
+ # tasks. The worker is expected to call `RespondWorkflowTaskCompleted` when it is done
+ # processing the task. The service will create a `WorkflowTaskStarted` event in the history for
+ # this task before handing it to the worker.
+ rpc :PollWorkflowTaskQueue, ::Temporalio::Api::WorkflowService::V1::PollWorkflowTaskQueueRequest, ::Temporalio::Api::WorkflowService::V1::PollWorkflowTaskQueueResponse
+ # RespondWorkflowTaskCompleted is called by workers to successfully complete workflow tasks
+ # they received from `PollWorkflowTaskQueue`.
+ #
+ # Completing a WorkflowTask will write a `WORKFLOW_TASK_COMPLETED` event to the workflow's
+ # history, along with events corresponding to whatever commands the SDK generated while
+ # executing the task (ex timer started, activity task scheduled, etc).
+ rpc :RespondWorkflowTaskCompleted, ::Temporalio::Api::WorkflowService::V1::RespondWorkflowTaskCompletedRequest, ::Temporalio::Api::WorkflowService::V1::RespondWorkflowTaskCompletedResponse
+ # RespondWorkflowTaskFailed is called by workers to indicate the processing of a workflow task
+ # failed.
+ #
+ # This results in a `WORKFLOW_TASK_FAILED` event written to the history, and a new workflow
+ # task will be scheduled. This API can be used to report unhandled failures resulting from
+ # applying the workflow task.
+ #
+ # Temporal will only append first WorkflowTaskFailed event to the history of workflow execution
+ # for consecutive failures.
+ rpc :RespondWorkflowTaskFailed, ::Temporalio::Api::WorkflowService::V1::RespondWorkflowTaskFailedRequest, ::Temporalio::Api::WorkflowService::V1::RespondWorkflowTaskFailedResponse
+ # PollActivityTaskQueue is called by workers to process activity tasks from a specific task
+ # queue.
+ #
+ # The worker is expected to call one of the `RespondActivityTaskXXX` methods when it is done
# processing the task.
- # Application also needs to call 'RecordActivityTaskHeartbeat' API within 'heartbeatTimeoutSeconds' interval to
- # prevent the task from getting timed out. An event 'ActivityTaskStarted' event is also written to workflow execution
- # history before the ActivityTask is dispatched to application worker.
- rpc :PollActivityTaskQueue, ::Temporal::Api::WorkflowService::V1::PollActivityTaskQueueRequest, ::Temporal::Api::WorkflowService::V1::PollActivityTaskQueueResponse
- # RecordActivityTaskHeartbeat is called by application worker while it is processing an ActivityTask. If worker fails
- # to heartbeat within 'heartbeatTimeoutSeconds' interval for the ActivityTask, then it will be marked as timedout and
- # 'ActivityTaskTimedOut' event will be written to the workflow history. Calling 'RecordActivityTaskHeartbeat' will
- # fail with 'NotFoundFailure' in such situations. Use the 'taskToken' provided as response of
- # PollActivityTaskQueue API call for heart beating.
- rpc :RecordActivityTaskHeartbeat, ::Temporal::Api::WorkflowService::V1::RecordActivityTaskHeartbeatRequest, ::Temporal::Api::WorkflowService::V1::RecordActivityTaskHeartbeatResponse
+ #
+ # An activity task is dispatched whenever a `SCHEDULE_ACTIVITY_TASK` command is produced during
+ # workflow execution. An in memory `ACTIVITY_TASK_STARTED` event is written to mutable state
+ # before the task is dispatched to the worker. The started event, and the final event
+ # (`ACTIVITY_TASK_COMPLETED` / `ACTIVITY_TASK_FAILED` / `ACTIVITY_TASK_TIMED_OUT`) will both be
+ # written permanently to Workflow execution history when Activity is finished. This is done to
+ # avoid writing many events in the case of a failure/retry loop.
+ rpc :PollActivityTaskQueue, ::Temporalio::Api::WorkflowService::V1::PollActivityTaskQueueRequest, ::Temporalio::Api::WorkflowService::V1::PollActivityTaskQueueResponse
+ # RecordActivityTaskHeartbeat is optionally called by workers while they execute activities.
+ #
+ # If worker fails to heartbeat within the `heartbeat_timeout` interval for the activity task,
+ # then it will be marked as timed out and an `ACTIVITY_TASK_TIMED_OUT` event will be written to
+ # the workflow history. Calling `RecordActivityTaskHeartbeat` will fail with `NotFound` in
+ # such situations, in that event, the SDK should request cancellation of the activity.
+ rpc :RecordActivityTaskHeartbeat, ::Temporalio::Api::WorkflowService::V1::RecordActivityTaskHeartbeatRequest, ::Temporalio::Api::WorkflowService::V1::RecordActivityTaskHeartbeatResponse
+ # See `RecordActivityTaskHeartbeat`. This version allows clients to record heartbeats by
+ # namespace/workflow id/activity id instead of task token.
+ #
# (-- api-linter: core::0136::prepositions=disabled
# aip.dev/not-precedent: "By" is used to indicate request type. --)
- # RecordActivityTaskHeartbeatById is called by application worker while it is processing an ActivityTask. If worker fails
- # to heartbeat within 'heartbeatTimeoutSeconds' interval for the ActivityTask, then it will be marked as timed out and
- # 'ActivityTaskTimedOut' event will be written to the workflow history. Calling 'RecordActivityTaskHeartbeatById' will
- # fail with 'NotFoundFailure' in such situations. Instead of using 'taskToken' like in RecordActivityTaskHeartbeat,
- # use Namespace, WorkflowId and ActivityId
- rpc :RecordActivityTaskHeartbeatById, ::Temporal::Api::WorkflowService::V1::RecordActivityTaskHeartbeatByIdRequest, ::Temporal::Api::WorkflowService::V1::RecordActivityTaskHeartbeatByIdResponse
- # RespondActivityTaskCompleted is called by application worker when it is done processing an ActivityTask. It will
- # result in a new 'ActivityTaskCompleted' event being written to the workflow history and a new WorkflowTask
- # created for the workflow so new commands could be made. Use the 'taskToken' provided as response of
- # PollActivityTaskQueue API call for completion. It fails with 'NotFoundFailure' if the taskToken is not valid
- # anymore due to activity timeout.
- rpc :RespondActivityTaskCompleted, ::Temporal::Api::WorkflowService::V1::RespondActivityTaskCompletedRequest, ::Temporal::Api::WorkflowService::V1::RespondActivityTaskCompletedResponse
+ rpc :RecordActivityTaskHeartbeatById, ::Temporalio::Api::WorkflowService::V1::RecordActivityTaskHeartbeatByIdRequest, ::Temporalio::Api::WorkflowService::V1::RecordActivityTaskHeartbeatByIdResponse
+ # RespondActivityTaskCompleted is called by workers when they successfully complete an activity
+ # task.
+ #
+ # This results in a new `ACTIVITY_TASK_COMPLETED` event being written to the workflow history
+ # and a new workflow task created for the workflow. Fails with `NotFound` if the task token is
+ # no longer valid due to activity timeout, already being completed, or never having existed.
+ rpc :RespondActivityTaskCompleted, ::Temporalio::Api::WorkflowService::V1::RespondActivityTaskCompletedRequest, ::Temporalio::Api::WorkflowService::V1::RespondActivityTaskCompletedResponse
+ # See `RespondActivityTaskCompleted`. This version allows clients to record completions by
+ # namespace/workflow id/activity id instead of task token.
+ #
# (-- api-linter: core::0136::prepositions=disabled
# aip.dev/not-precedent: "By" is used to indicate request type. --)
- # RespondActivityTaskCompletedById is called by application worker when it is done processing an ActivityTask.
- # It will result in a new 'ActivityTaskCompleted' event being written to the workflow history and a new WorkflowTask
- # created for the workflow so new commands could be made. Similar to RespondActivityTaskCompleted but use Namespace,
- # WorkflowId and ActivityId instead of 'taskToken' for completion. It fails with 'NotFoundFailure'
- # if the these Ids are not valid anymore due to activity timeout.
- rpc :RespondActivityTaskCompletedById, ::Temporal::Api::WorkflowService::V1::RespondActivityTaskCompletedByIdRequest, ::Temporal::Api::WorkflowService::V1::RespondActivityTaskCompletedByIdResponse
- # RespondActivityTaskFailed is called by application worker when it is done processing an ActivityTask. It will
- # result in a new 'ActivityTaskFailed' event being written to the workflow history and a new WorkflowTask
- # created for the workflow instance so new commands could be made. Use the 'taskToken' provided as response of
- # PollActivityTaskQueue API call for completion. It fails with 'NotFoundFailure' if the taskToken is not valid
- # anymore due to activity timeout.
- rpc :RespondActivityTaskFailed, ::Temporal::Api::WorkflowService::V1::RespondActivityTaskFailedRequest, ::Temporal::Api::WorkflowService::V1::RespondActivityTaskFailedResponse
+ rpc :RespondActivityTaskCompletedById, ::Temporalio::Api::WorkflowService::V1::RespondActivityTaskCompletedByIdRequest, ::Temporalio::Api::WorkflowService::V1::RespondActivityTaskCompletedByIdResponse
+ # RespondActivityTaskFailed is called by workers when processing an activity task fails.
+ #
+ # This results in a new `ACTIVITY_TASK_FAILED` event being written to the workflow history and
+ # a new workflow task created for the workflow. Fails with `NotFound` if the task token is no
+ # longer valid due to activity timeout, already being completed, or never having existed.
+ rpc :RespondActivityTaskFailed, ::Temporalio::Api::WorkflowService::V1::RespondActivityTaskFailedRequest, ::Temporalio::Api::WorkflowService::V1::RespondActivityTaskFailedResponse
+ # See `RespondActivityTaskFailed`. This version allows clients to record failures by
+ # namespace/workflow id/activity id instead of task token.
+ #
# (-- api-linter: core::0136::prepositions=disabled
# aip.dev/not-precedent: "By" is used to indicate request type. --)
- # RespondActivityTaskFailedById is called by application worker when it is done processing an ActivityTask.
- # It will result in a new 'ActivityTaskFailed' event being written to the workflow history and a new WorkflowTask
- # created for the workflow instance so new commands could be made. Similar to RespondActivityTaskFailed but use
- # Namespace, WorkflowId and ActivityId instead of 'taskToken' for completion. It fails with 'NotFoundFailure'
- # if the these Ids are not valid anymore due to activity timeout.
- rpc :RespondActivityTaskFailedById, ::Temporal::Api::WorkflowService::V1::RespondActivityTaskFailedByIdRequest, ::Temporal::Api::WorkflowService::V1::RespondActivityTaskFailedByIdResponse
- # RespondActivityTaskCanceled is called by application worker when it is successfully canceled an ActivityTask. It will
- # result in a new 'ActivityTaskCanceled' event being written to the workflow history and a new WorkflowTask
- # created for the workflow instance so new commands could be made. Use the 'taskToken' provided as response of
- # PollActivityTaskQueue API call for completion. It fails with 'NotFoundFailure' if the taskToken is not valid
- # anymore due to activity timeout.
- rpc :RespondActivityTaskCanceled, ::Temporal::Api::WorkflowService::V1::RespondActivityTaskCanceledRequest, ::Temporal::Api::WorkflowService::V1::RespondActivityTaskCanceledResponse
+ rpc :RespondActivityTaskFailedById, ::Temporalio::Api::WorkflowService::V1::RespondActivityTaskFailedByIdRequest, ::Temporalio::Api::WorkflowService::V1::RespondActivityTaskFailedByIdResponse
+ # RespondActivityTaskCanceled is called by workers when an activity task is successfully canceled.
+ #
+ # This results in a new `ACTIVITY_TASK_CANCELED` event being written to the workflow history
+ # and a new workflow task created for the workflow. Fails with `NotFound` if the task token is
+ # no longer valid due to activity timeout, already being completed, or never having existed.
+ rpc :RespondActivityTaskCanceled, ::Temporalio::Api::WorkflowService::V1::RespondActivityTaskCanceledRequest, ::Temporalio::Api::WorkflowService::V1::RespondActivityTaskCanceledResponse
+ # See `RespondActivityTaskCanceled`. This version allows clients to record cancellations by
+ # namespace/workflow id/activity id instead of task token.
+ #
# (-- api-linter: core::0136::prepositions=disabled
# aip.dev/not-precedent: "By" is used to indicate request type. --)
- # RespondActivityTaskCanceledById is called by application worker when it is successfully canceled an ActivityTask.
- # It will result in a new 'ActivityTaskCanceled' event being written to the workflow history and a new WorkflowTask
- # created for the workflow instance so new commands could be made. Similar to RespondActivityTaskCanceled but use
- # Namespace, WorkflowId and ActivityId instead of 'taskToken' for completion. It fails with 'NotFoundFailure'
- # if the these Ids are not valid anymore due to activity timeout.
- rpc :RespondActivityTaskCanceledById, ::Temporal::Api::WorkflowService::V1::RespondActivityTaskCanceledByIdRequest, ::Temporal::Api::WorkflowService::V1::RespondActivityTaskCanceledByIdResponse
- # RequestCancelWorkflowExecution is called by application worker when it wants to request cancellation of a workflow instance.
- # It will result in a new 'WorkflowExecutionCancelRequested' event being written to the workflow history and a new WorkflowTask
- # created for the workflow instance so new commands could be made. It fails with 'NotFoundFailure' if the workflow is not valid
- # anymore due to completion or doesn't exist.
- rpc :RequestCancelWorkflowExecution, ::Temporal::Api::WorkflowService::V1::RequestCancelWorkflowExecutionRequest, ::Temporal::Api::WorkflowService::V1::RequestCancelWorkflowExecutionResponse
- # SignalWorkflowExecution is used to send a signal event to running workflow execution. This results in
- # WorkflowExecutionSignaled event recorded in the history and a workflow task being created for the execution.
- rpc :SignalWorkflowExecution, ::Temporal::Api::WorkflowService::V1::SignalWorkflowExecutionRequest, ::Temporal::Api::WorkflowService::V1::SignalWorkflowExecutionResponse
+ rpc :RespondActivityTaskCanceledById, ::Temporalio::Api::WorkflowService::V1::RespondActivityTaskCanceledByIdRequest, ::Temporalio::Api::WorkflowService::V1::RespondActivityTaskCanceledByIdResponse
+ # RequestCancelWorkflowExecution is called by workers when they want to request cancellation of
+ # a workflow execution.
+ #
+ # This results in a new `WORKFLOW_EXECUTION_CANCEL_REQUESTED` event being written to the
+ # workflow history and a new workflow task created for the workflow. It returns success if the requested
+ # workflow is already closed. It fails with 'NotFound' if the requested workflow doesn't exist.
+ rpc :RequestCancelWorkflowExecution, ::Temporalio::Api::WorkflowService::V1::RequestCancelWorkflowExecutionRequest, ::Temporalio::Api::WorkflowService::V1::RequestCancelWorkflowExecutionResponse
+ # SignalWorkflowExecution is used to send a signal to a running workflow execution.
+ #
+ # This results in a `WORKFLOW_EXECUTION_SIGNALED` event recorded in the history and a workflow
+ # task being created for the execution.
+ rpc :SignalWorkflowExecution, ::Temporalio::Api::WorkflowService::V1::SignalWorkflowExecutionRequest, ::Temporalio::Api::WorkflowService::V1::SignalWorkflowExecutionResponse
+ # SignalWithStartWorkflowExecution is used to ensure a signal is sent to a workflow, even if
+ # it isn't yet started.
+ #
+ # If the workflow is running, a `WORKFLOW_EXECUTION_SIGNALED` event is recorded in the history
+ # and a workflow task is generated.
+ #
+ # If the workflow is not running or not found, then the workflow is created with
+ # `WORKFLOW_EXECUTION_STARTED` and `WORKFLOW_EXECUTION_SIGNALED` events in its history, and a
+ # workflow task is generated.
+ #
# (-- api-linter: core::0136::prepositions=disabled
# aip.dev/not-precedent: "With" is used to indicate combined operation. --)
- # SignalWithStartWorkflowExecution is used to ensure sending signal to a workflow.
- # If the workflow is running, this results in WorkflowExecutionSignaled event being recorded in the history
- # and a workflow task being created for the execution.
- # If the workflow is not running or not found, this results in WorkflowExecutionStarted and WorkflowExecutionSignaled
- # events being recorded in history, and a workflow task being created for the execution
- rpc :SignalWithStartWorkflowExecution, ::Temporal::Api::WorkflowService::V1::SignalWithStartWorkflowExecutionRequest, ::Temporal::Api::WorkflowService::V1::SignalWithStartWorkflowExecutionResponse
- # ResetWorkflowExecution reset an existing workflow execution to WorkflowTaskCompleted event(exclusive).
- # And it will immediately terminating the current execution instance.
- rpc :ResetWorkflowExecution, ::Temporal::Api::WorkflowService::V1::ResetWorkflowExecutionRequest, ::Temporal::Api::WorkflowService::V1::ResetWorkflowExecutionResponse
- # TerminateWorkflowExecution terminates an existing workflow execution by recording WorkflowExecutionTerminated event
- # in the history and immediately terminating the execution instance.
- rpc :TerminateWorkflowExecution, ::Temporal::Api::WorkflowService::V1::TerminateWorkflowExecutionRequest, ::Temporal::Api::WorkflowService::V1::TerminateWorkflowExecutionResponse
+ rpc :SignalWithStartWorkflowExecution, ::Temporalio::Api::WorkflowService::V1::SignalWithStartWorkflowExecutionRequest, ::Temporalio::Api::WorkflowService::V1::SignalWithStartWorkflowExecutionResponse
+ # ResetWorkflowExecution will reset an existing workflow execution to a specified
+ # `WORKFLOW_TASK_COMPLETED` event (exclusive). It will immediately terminate the current
+ # execution instance.
+ # TODO: Does exclusive here mean *just* the completed event, or also WFT started? Otherwise the task is doomed to time out?
+ rpc :ResetWorkflowExecution, ::Temporalio::Api::WorkflowService::V1::ResetWorkflowExecutionRequest, ::Temporalio::Api::WorkflowService::V1::ResetWorkflowExecutionResponse
+ # TerminateWorkflowExecution terminates an existing workflow execution by recording a
+ # `WORKFLOW_EXECUTION_TERMINATED` event in the history and immediately terminating the
+ # execution instance.
+ rpc :TerminateWorkflowExecution, ::Temporalio::Api::WorkflowService::V1::TerminateWorkflowExecutionRequest, ::Temporalio::Api::WorkflowService::V1::TerminateWorkflowExecutionResponse
+ # DeleteWorkflowExecution asynchronously deletes a specific Workflow Execution (when
+ # WorkflowExecution.run_id is provided) or the latest Workflow Execution (when
+ # WorkflowExecution.run_id is not provided). If the Workflow Execution is Running, it will be
+ # terminated before deletion.
+ # (-- api-linter: core::0135::method-signature=disabled
+ # aip.dev/not-precedent: DeleteWorkflowExecution RPC doesn't follow Google API format. --)
+ # (-- api-linter: core::0135::response-message-name=disabled
+ # aip.dev/not-precedent: DeleteWorkflowExecution RPC doesn't follow Google API format. --)
+ rpc :DeleteWorkflowExecution, ::Temporalio::Api::WorkflowService::V1::DeleteWorkflowExecutionRequest, ::Temporalio::Api::WorkflowService::V1::DeleteWorkflowExecutionResponse
# ListOpenWorkflowExecutions is a visibility API to list the open executions in a specific namespace.
- rpc :ListOpenWorkflowExecutions, ::Temporal::Api::WorkflowService::V1::ListOpenWorkflowExecutionsRequest, ::Temporal::Api::WorkflowService::V1::ListOpenWorkflowExecutionsResponse
+ rpc :ListOpenWorkflowExecutions, ::Temporalio::Api::WorkflowService::V1::ListOpenWorkflowExecutionsRequest, ::Temporalio::Api::WorkflowService::V1::ListOpenWorkflowExecutionsResponse
# ListClosedWorkflowExecutions is a visibility API to list the closed executions in a specific namespace.
- rpc :ListClosedWorkflowExecutions, ::Temporal::Api::WorkflowService::V1::ListClosedWorkflowExecutionsRequest, ::Temporal::Api::WorkflowService::V1::ListClosedWorkflowExecutionsResponse
+ rpc :ListClosedWorkflowExecutions, ::Temporalio::Api::WorkflowService::V1::ListClosedWorkflowExecutionsRequest, ::Temporalio::Api::WorkflowService::V1::ListClosedWorkflowExecutionsResponse
# ListWorkflowExecutions is a visibility API to list workflow executions in a specific namespace.
- rpc :ListWorkflowExecutions, ::Temporal::Api::WorkflowService::V1::ListWorkflowExecutionsRequest, ::Temporal::Api::WorkflowService::V1::ListWorkflowExecutionsResponse
+ rpc :ListWorkflowExecutions, ::Temporalio::Api::WorkflowService::V1::ListWorkflowExecutionsRequest, ::Temporalio::Api::WorkflowService::V1::ListWorkflowExecutionsResponse
# ListArchivedWorkflowExecutions is a visibility API to list archived workflow executions in a specific namespace.
- rpc :ListArchivedWorkflowExecutions, ::Temporal::Api::WorkflowService::V1::ListArchivedWorkflowExecutionsRequest, ::Temporal::Api::WorkflowService::V1::ListArchivedWorkflowExecutionsResponse
+ rpc :ListArchivedWorkflowExecutions, ::Temporalio::Api::WorkflowService::V1::ListArchivedWorkflowExecutionsRequest, ::Temporalio::Api::WorkflowService::V1::ListArchivedWorkflowExecutionsResponse
# ScanWorkflowExecutions is a visibility API to list large amount of workflow executions in a specific namespace without order.
- rpc :ScanWorkflowExecutions, ::Temporal::Api::WorkflowService::V1::ScanWorkflowExecutionsRequest, ::Temporal::Api::WorkflowService::V1::ScanWorkflowExecutionsResponse
+ rpc :ScanWorkflowExecutions, ::Temporalio::Api::WorkflowService::V1::ScanWorkflowExecutionsRequest, ::Temporalio::Api::WorkflowService::V1::ScanWorkflowExecutionsResponse
# CountWorkflowExecutions is a visibility API to count of workflow executions in a specific namespace.
- rpc :CountWorkflowExecutions, ::Temporal::Api::WorkflowService::V1::CountWorkflowExecutionsRequest, ::Temporal::Api::WorkflowService::V1::CountWorkflowExecutionsResponse
+ rpc :CountWorkflowExecutions, ::Temporalio::Api::WorkflowService::V1::CountWorkflowExecutionsRequest, ::Temporalio::Api::WorkflowService::V1::CountWorkflowExecutionsResponse
# GetSearchAttributes is a visibility API to get all legal keys that could be used in list APIs
- rpc :GetSearchAttributes, ::Temporal::Api::WorkflowService::V1::GetSearchAttributesRequest, ::Temporal::Api::WorkflowService::V1::GetSearchAttributesResponse
- # RespondQueryTaskCompleted is called by application worker to complete a QueryTask (which is a WorkflowTask for query)
- # as a result of 'PollWorkflowTaskQueue' API call. Completing a QueryTask will unblock the client call to 'QueryWorkflow'
- # API and return the query result to client as a response to 'QueryWorkflow' API call.
- rpc :RespondQueryTaskCompleted, ::Temporal::Api::WorkflowService::V1::RespondQueryTaskCompletedRequest, ::Temporal::Api::WorkflowService::V1::RespondQueryTaskCompletedResponse
- # ResetStickyTaskQueue resets the sticky task queue related information in mutable state of a given workflow.
+ rpc :GetSearchAttributes, ::Temporalio::Api::WorkflowService::V1::GetSearchAttributesRequest, ::Temporalio::Api::WorkflowService::V1::GetSearchAttributesResponse
+ # RespondQueryTaskCompleted is called by workers to complete queries which were delivered on
+ # the `query` (not `queries`) field of a `PollWorkflowTaskQueueResponse`.
+ #
+ # Completing the query will unblock the corresponding client call to `QueryWorkflow` and return
+ # the query result as a response.
+ rpc :RespondQueryTaskCompleted, ::Temporalio::Api::WorkflowService::V1::RespondQueryTaskCompletedRequest, ::Temporalio::Api::WorkflowService::V1::RespondQueryTaskCompletedResponse
+ # ResetStickyTaskQueue resets the sticky task queue related information in the mutable state of
+ # a given workflow. This is prudent for workers to perform if a workflow has been paged out of
+ # their cache.
+ #
# Things cleared are:
# 1. StickyTaskQueue
# 2. StickyScheduleToStartTimeout
- rpc :ResetStickyTaskQueue, ::Temporal::Api::WorkflowService::V1::ResetStickyTaskQueueRequest, ::Temporal::Api::WorkflowService::V1::ResetStickyTaskQueueResponse
- # QueryWorkflow returns query result for a specified workflow execution
- rpc :QueryWorkflow, ::Temporal::Api::WorkflowService::V1::QueryWorkflowRequest, ::Temporal::Api::WorkflowService::V1::QueryWorkflowResponse
+ rpc :ResetStickyTaskQueue, ::Temporalio::Api::WorkflowService::V1::ResetStickyTaskQueueRequest, ::Temporalio::Api::WorkflowService::V1::ResetStickyTaskQueueResponse
+ # QueryWorkflow requests a query be executed for a specified workflow execution.
+ rpc :QueryWorkflow, ::Temporalio::Api::WorkflowService::V1::QueryWorkflowRequest, ::Temporalio::Api::WorkflowService::V1::QueryWorkflowResponse
# DescribeWorkflowExecution returns information about the specified workflow execution.
- rpc :DescribeWorkflowExecution, ::Temporal::Api::WorkflowService::V1::DescribeWorkflowExecutionRequest, ::Temporal::Api::WorkflowService::V1::DescribeWorkflowExecutionResponse
- # DescribeTaskQueue returns information about the target task queue, right now this API returns the
- # pollers which polled this task queue in last few minutes.
- rpc :DescribeTaskQueue, ::Temporal::Api::WorkflowService::V1::DescribeTaskQueueRequest, ::Temporal::Api::WorkflowService::V1::DescribeTaskQueueResponse
+ rpc :DescribeWorkflowExecution, ::Temporalio::Api::WorkflowService::V1::DescribeWorkflowExecutionRequest, ::Temporalio::Api::WorkflowService::V1::DescribeWorkflowExecutionResponse
+ # DescribeTaskQueue returns information about the target task queue.
+ rpc :DescribeTaskQueue, ::Temporalio::Api::WorkflowService::V1::DescribeTaskQueueRequest, ::Temporalio::Api::WorkflowService::V1::DescribeTaskQueueResponse
# GetClusterInfo returns information about temporal cluster
- rpc :GetClusterInfo, ::Temporal::Api::WorkflowService::V1::GetClusterInfoRequest, ::Temporal::Api::WorkflowService::V1::GetClusterInfoResponse
- rpc :ListTaskQueuePartitions, ::Temporal::Api::WorkflowService::V1::ListTaskQueuePartitionsRequest, ::Temporal::Api::WorkflowService::V1::ListTaskQueuePartitionsResponse
+ rpc :GetClusterInfo, ::Temporalio::Api::WorkflowService::V1::GetClusterInfoRequest, ::Temporalio::Api::WorkflowService::V1::GetClusterInfoResponse
+ # GetSystemInfo returns information about the system.
+ rpc :GetSystemInfo, ::Temporalio::Api::WorkflowService::V1::GetSystemInfoRequest, ::Temporalio::Api::WorkflowService::V1::GetSystemInfoResponse
+ rpc :ListTaskQueuePartitions, ::Temporalio::Api::WorkflowService::V1::ListTaskQueuePartitionsRequest, ::Temporalio::Api::WorkflowService::V1::ListTaskQueuePartitionsResponse
+ # Creates a new schedule.
+ # (-- api-linter: core::0133::method-signature=disabled
+ # aip.dev/not-precedent: CreateSchedule doesn't follow Google API format --)
+ # (-- api-linter: core::0133::response-message-name=disabled
+ # aip.dev/not-precedent: CreateSchedule doesn't follow Google API format --)
+ # (-- api-linter: core::0133::http-uri-parent=disabled
+ # aip.dev/not-precedent: CreateSchedule doesn't follow Google API format --)
+ rpc :CreateSchedule, ::Temporalio::Api::WorkflowService::V1::CreateScheduleRequest, ::Temporalio::Api::WorkflowService::V1::CreateScheduleResponse
+ # Returns the schedule description and current state of an existing schedule.
+ rpc :DescribeSchedule, ::Temporalio::Api::WorkflowService::V1::DescribeScheduleRequest, ::Temporalio::Api::WorkflowService::V1::DescribeScheduleResponse
+ # Changes the configuration or state of an existing schedule.
+ # (-- api-linter: core::0134::response-message-name=disabled
+ # aip.dev/not-precedent: UpdateSchedule RPC doesn't follow Google API format. --)
+ # (-- api-linter: core::0134::method-signature=disabled
+ # aip.dev/not-precedent: UpdateSchedule RPC doesn't follow Google API format. --)
+ rpc :UpdateSchedule, ::Temporalio::Api::WorkflowService::V1::UpdateScheduleRequest, ::Temporalio::Api::WorkflowService::V1::UpdateScheduleResponse
+ # Makes a specific change to a schedule or triggers an immediate action.
+ # (-- api-linter: core::0134::synonyms=disabled
+ # aip.dev/not-precedent: we have both patch and update. --)
+ rpc :PatchSchedule, ::Temporalio::Api::WorkflowService::V1::PatchScheduleRequest, ::Temporalio::Api::WorkflowService::V1::PatchScheduleResponse
+ # Lists matching times within a range.
+ rpc :ListScheduleMatchingTimes, ::Temporalio::Api::WorkflowService::V1::ListScheduleMatchingTimesRequest, ::Temporalio::Api::WorkflowService::V1::ListScheduleMatchingTimesResponse
+ # Deletes a schedule, removing it from the system.
+ # (-- api-linter: core::0135::method-signature=disabled
+ # aip.dev/not-precedent: DeleteSchedule doesn't follow Google API format --)
+ # (-- api-linter: core::0135::response-message-name=disabled
+ # aip.dev/not-precedent: DeleteSchedule doesn't follow Google API format --)
+ rpc :DeleteSchedule, ::Temporalio::Api::WorkflowService::V1::DeleteScheduleRequest, ::Temporalio::Api::WorkflowService::V1::DeleteScheduleResponse
+ # List all schedules in a namespace.
+ rpc :ListSchedules, ::Temporalio::Api::WorkflowService::V1::ListSchedulesRequest, ::Temporalio::Api::WorkflowService::V1::ListSchedulesResponse
+ # Allows users to specify sets of worker build id versions on a per task queue basis. Versions
+ # are ordered, and may be either compatible with some extant version, or a new incompatible
+ # version, forming sets of ids which are incompatible with each other, but whose contained
+ # members are compatible with one another.
+ #
+ # (-- api-linter: core::0134::response-message-name=disabled
+ # aip.dev/not-precedent: UpdateWorkerBuildIdCompatibility RPC doesn't follow Google API format. --)
+ # (-- api-linter: core::0134::method-signature=disabled
+ # aip.dev/not-precedent: UpdateWorkerBuildIdCompatibility RPC doesn't follow Google API format. --)
+ rpc :UpdateWorkerBuildIdCompatibility, ::Temporalio::Api::WorkflowService::V1::UpdateWorkerBuildIdCompatibilityRequest, ::Temporalio::Api::WorkflowService::V1::UpdateWorkerBuildIdCompatibilityResponse
+ # Fetches the worker build id versioning sets for some task queue and related metadata.
+ rpc :GetWorkerBuildIdCompatibility, ::Temporalio::Api::WorkflowService::V1::GetWorkerBuildIdCompatibilityRequest, ::Temporalio::Api::WorkflowService::V1::GetWorkerBuildIdCompatibilityResponse
+ # Invokes the specified update function on user workflow code.
+ # (-- api-linter: core::0134=disabled
+ # aip.dev/not-precedent: UpdateWorkflowExecution doesn't follow Google API format --)
+ rpc :UpdateWorkflowExecution, ::Temporalio::Api::WorkflowService::V1::UpdateWorkflowExecutionRequest, ::Temporalio::Api::WorkflowService::V1::UpdateWorkflowExecutionResponse
+ # Polls a workflow execution for the outcome of a workflow execution update
+ # previously issued through the UpdateWorkflowExecution RPC. The effective
+ # timeout on this call will be the shorter of the caller-supplied gRPC
+ # timeout and the server's configured long-poll timeout.
+ # (-- api-linter: core::0134=disabled
+ # aip.dev/not-precedent: UpdateWorkflowExecution doesn't follow Google API format --)
+ rpc :PollWorkflowExecutionUpdate, ::Temporalio::Api::WorkflowService::V1::PollWorkflowExecutionUpdateRequest, ::Temporalio::Api::WorkflowService::V1::PollWorkflowExecutionUpdateResponse
+ # StartBatchOperation starts a new batch operation
+ rpc :StartBatchOperation, ::Temporalio::Api::WorkflowService::V1::StartBatchOperationRequest, ::Temporalio::Api::WorkflowService::V1::StartBatchOperationResponse
+ # StopBatchOperation stops a batch operation
+ rpc :StopBatchOperation, ::Temporalio::Api::WorkflowService::V1::StopBatchOperationRequest, ::Temporalio::Api::WorkflowService::V1::StopBatchOperationResponse
+ # DescribeBatchOperation returns the information about a batch operation
+ rpc :DescribeBatchOperation, ::Temporalio::Api::WorkflowService::V1::DescribeBatchOperationRequest, ::Temporalio::Api::WorkflowService::V1::DescribeBatchOperationResponse
+ # ListBatchOperations returns a list of batch operations
+ rpc :ListBatchOperations, ::Temporalio::Api::WorkflowService::V1::ListBatchOperationsRequest, ::Temporalio::Api::WorkflowService::V1::ListBatchOperationsResponse
end
Stub = Service.rpc_stub_class
diff --git a/lib/temporal.rb b/lib/temporal.rb
index e580192f..b9f49d55 100644
--- a/lib/temporal.rb
+++ b/lib/temporal.rb
@@ -8,6 +8,7 @@
require 'temporal/metrics'
require 'temporal/json'
require 'temporal/errors'
+require 'temporal/schedule'
require 'temporal/workflow/errors'
module Temporal
@@ -17,17 +18,42 @@ module Temporal
:start_workflow,
:schedule_workflow,
:register_namespace,
+ :describe_namespace,
+ :list_namespaces,
:signal_workflow,
+ :query_workflow,
:await_workflow_result,
:reset_workflow,
:terminate_workflow,
:fetch_workflow_execution_info,
:complete_activity,
- :fail_activity
+ :fail_activity,
+ :list_open_workflow_executions,
+ :list_closed_workflow_executions,
+ :query_workflow_executions,
+ :count_workflow_executions,
+ :add_custom_search_attributes,
+ :list_custom_search_attributes,
+ :remove_custom_search_attributes,
+ :connection,
+ :list_schedules,
+ :describe_schedule,
+ :create_schedule,
+ :delete_schedule,
+ :update_schedule,
+ :trigger_schedule,
+ :pause_schedule,
+ :unpause_schedule,
+ :get_workflow_history,
+ :get_workflow_history_json,
+ :get_workflow_history_protobuf
class << self
def configure(&block)
yield config
+ # Reset the singleton client after configuration was altered to ensure
+ # it is initialized with the latest attributes
+ @default_client = nil
end
def configuration
@@ -44,11 +70,11 @@ def metrics
end
private
-
+
def default_client
@default_client ||= Client.new(config)
end
-
+
def config
@config ||= Configuration.new
end
diff --git a/lib/temporal/activity.rb b/lib/temporal/activity.rb
index a3a726af..d5524a0a 100644
--- a/lib/temporal/activity.rb
+++ b/lib/temporal/activity.rb
@@ -1,4 +1,5 @@
require 'temporal/activity/workflow_convenience_methods'
+require 'temporal/callable'
require 'temporal/concerns/executable'
require 'temporal/errors'
@@ -9,7 +10,9 @@ class Activity
def self.execute_in_context(context, input)
activity = new(context)
- activity.execute(*input)
+ callable = Temporal::Callable.new(method: activity.method(:execute))
+
+ callable.call(input)
end
def initialize(context)
diff --git a/lib/temporal/activity/context.rb b/lib/temporal/activity/context.rb
index 2c65c5af..dd330520 100644
--- a/lib/temporal/activity/context.rb
+++ b/lib/temporal/activity/context.rb
@@ -7,12 +7,21 @@
module Temporal
class Activity
class Context
- def initialize(connection, metadata)
+ def initialize(connection, metadata, config, heartbeat_thread_pool)
@connection = connection
@metadata = metadata
+ @config = config
+ @heartbeat_thread_pool = heartbeat_thread_pool
+ @last_heartbeat_details = [] # an array to differentiate nil heartbeat from no heartbeat queued
+ @heartbeat_check_scheduled = nil
+ @heartbeat_mutex = Mutex.new
@async = false
+ @cancel_requested = false
+ @last_heartbeat_throttled = false
end
+ attr_reader :heartbeat_check_scheduled, :cancel_requested, :last_heartbeat_throttled
+
def async
@async = true
end
@@ -31,8 +40,48 @@ def async_token
end
def heartbeat(details = nil)
- logger.debug("Activity heartbeat", metadata.to_h)
- connection.record_activity_task_heartbeat(task_token: task_token, details: details)
+ logger.debug('Activity heartbeat', metadata.to_h)
+ # Heartbeat throttling limits the number of calls made to Temporal server, reducing load on the server
+ # and improving activity performance. The first heartbeat in an activity will always be sent immediately.
+ # After that, a timer is scheduled on a background thread. While this check heartbeat thread is scheduled,
+ # heartbeats will not be directly sent to the server, but rather the value will be saved for later. When
+ # this timer fires and the thread resumes, it will send any heartbeats that came in while waiting, and
+ # begin the process over again.
+ #
+ # The interval is determined by the following criteria:
+ # - if a heartbeat timeout is set, 80% of it
+ # - or if there is no heartbeat timeout set, use the configuration for default_heartbeat_throttle_interval
+ # - any duration is capped by the max_heartbeat_throttle_interval configuration
+ #
+ # Example:
+ # Assume a heartbeat timeout of 10s
+ # Throttle interval will be 8s, below the 60s maximum interval cap
+ # Assume the following timeline:
+ # t = 0, heartbeat, sent, timer scheduled for 8s
+ # t = 1, heartbeat, saved
+ # t = 6, heartbeat, saved
+ # t = 8, timer wakes up, sends the saved heartbeat from t = 6, new timer scheduled for 16s
+ # ... no heartbeats
+ # t = 16, timer wakes up, no saved heartbeat to send, no new timer scheduled
+ # t = 20, heartbeat, sent, timer scheduled for 28s
+ # ...
+
+ heartbeat_mutex.synchronize do
+ if heartbeat_check_scheduled.nil?
+ send_heartbeat(details)
+ @last_heartbeat_details = []
+ @last_heartbeat_throttled = false
+ @heartbeat_check_scheduled = schedule_check_heartbeat(heartbeat_throttle_interval)
+ else
+ logger.debug('Throttling heartbeat for sending later', metadata.to_h)
+ @last_heartbeat_details = [details]
+ @last_heartbeat_throttled = true
+ end
+ end
+
+ # Return back the context so that .cancel_requested works similarly to before when the
+ # GRPC response was returned back directly
+ self
end
def heartbeat_details
@@ -56,13 +105,69 @@ def headers
metadata.headers
end
+ # The name of the activity's class. In a dynamic Activity, it may be the name
+ # of a class or a key to an executor you want to delegate to.
+ def name
+ metadata.name
+ end
+
private
- attr_reader :connection, :metadata
+ attr_reader :connection, :metadata, :heartbeat_thread_pool, :config, :heartbeat_mutex, :last_heartbeat_details
def task_token
metadata.task_token
end
+
+ def heartbeat_throttle_interval
+ # This is a port of logic in the Go SDK
+ # https://github.com/temporalio/sdk-go/blob/eaa3802876de77500164f80f378559c51d6bb0e2/internal/internal_task_handlers.go#L1990
+ interval = if metadata.heartbeat_timeout > 0
+ metadata.heartbeat_timeout * 0.8
+ else
+ config.timeouts[:default_heartbeat_throttle_interval]
+ end
+
+ [interval, config.timeouts[:max_heartbeat_throttle_interval]].min
+ end
+
+ def send_heartbeat(details)
+ begin
+ response = connection.record_activity_task_heartbeat(
+ namespace: metadata.namespace,
+ task_token: task_token,
+ details: details)
+ if response.cancel_requested
+ logger.info('Activity has been canceled', metadata.to_h)
+ @cancel_requested = true
+ end
+ rescue => error
+ Temporal::ErrorHandler.handle(error, config, metadata: metadata)
+ raise
+ end
+ end
+
+ def schedule_check_heartbeat(delay)
+ return nil if delay <= 0
+
+ heartbeat_thread_pool.schedule([metadata.workflow_run_id, metadata.id, metadata.attempt], delay) do
+ details = heartbeat_mutex.synchronize do
+ @heartbeat_check_scheduled = nil
+ # Check to see if there is a saved heartbeat. If heartbeat was not called while this was waiting,
+ # this will be empty and there's no need to send anything or to schedule another heartbeat
+ # check.
+ last_heartbeat_details
+ end
+ begin
+ unless details.empty?
+ heartbeat(details.first)
+ end
+ rescue
+ # Can swallow any errors here since this only runs on a background thread. Any error will be
+ # sent to the error handler above in send_heartbeat.
+ end
+ end
+ end
end
end
end
diff --git a/lib/temporal/activity/poller.rb b/lib/temporal/activity/poller.rb
index c07eba7e..859fb688 100644
--- a/lib/temporal/activity/poller.rb
+++ b/lib/temporal/activity/poller.rb
@@ -1,14 +1,18 @@
-require 'temporal/connection'
-require 'temporal/thread_pool'
-require 'temporal/middleware/chain'
require 'temporal/activity/task_processor'
+require 'temporal/connection'
require 'temporal/error_handler'
+require 'temporal/metric_keys'
+require 'temporal/middleware/chain'
+require 'temporal/scheduled_thread_pool'
+require 'temporal/thread_pool'
module Temporal
class Activity
class Poller
DEFAULT_OPTIONS = {
- thread_pool_size: 20
+ thread_pool_size: 20,
+ poll_retry_seconds: 0,
+ max_tasks_per_second: 0 # unlimited
}.freeze
def initialize(namespace, task_queue, activity_lookup, config, middleware = [], options = {})
@@ -36,8 +40,13 @@ def cancel_pending_requests
end
def wait
+ if !shutting_down?
+ raise "Activity poller waiting for shutdown completion without being in shutting_down state!"
+ end
+
thread.join
thread_pool.shutdown
+ heartbeat_thread_pool.shutdown
end
private
@@ -53,6 +62,9 @@ def shutting_down?
end
def poll_loop
+ # Prevent the poller thread from silently dying
+ Thread.current.abort_on_exception = true
+
last_poll_time = Time.now
metrics_tags = { namespace: namespace, task_queue: task_queue }.freeze
@@ -62,11 +74,17 @@ def poll_loop
return if shutting_down?
time_diff_ms = ((Time.now - last_poll_time) * 1000).round
- Temporal.metrics.timing('activity_poller.time_since_last_poll', time_diff_ms, metrics_tags)
+ Temporal.metrics.timing(Temporal::MetricKeys::ACTIVITY_POLLER_TIME_SINCE_LAST_POLL, time_diff_ms, metrics_tags)
Temporal.logger.debug("Polling activity task queue", { namespace: namespace, task_queue: task_queue })
task = poll_for_task
last_poll_time = Time.now
+
+ Temporal.metrics.increment(
+ Temporal::MetricKeys::ACTIVITY_POLLER_POLL_COMPLETED,
+ metrics_tags.merge(received_task: (!task.nil?).to_s)
+ )
+
next unless task&.activity_type
thread_pool.schedule { process(task) }
@@ -74,11 +92,17 @@ def poll_loop
end
def poll_for_task
- connection.poll_activity_task_queue(namespace: namespace, task_queue: task_queue)
+ connection.poll_activity_task_queue(namespace: namespace, task_queue: task_queue,
+ max_tasks_per_second: max_tasks_per_second)
+ rescue ::GRPC::Cancelled
+ # We're shutting down and we've already reported that in the logs
+ nil
rescue StandardError => error
Temporal.logger.error("Unable to poll activity task queue", { namespace: namespace, task_queue: task_queue, error: error.inspect })
- Temporal::ErrorHandler.handle(error)
+ Temporal::ErrorHandler.handle(error, config)
+
+ sleep(poll_retry_seconds)
nil
end
@@ -86,11 +110,39 @@ def poll_for_task
def process(task)
middleware_chain = Middleware::Chain.new(middleware)
- TaskProcessor.new(task, namespace, activity_lookup, middleware_chain, config).process
+ TaskProcessor.new(task, task_queue, namespace, activity_lookup, middleware_chain, config, heartbeat_thread_pool).process
+ end
+
+ def poll_retry_seconds
+ @options[:poll_retry_seconds]
+ end
+
+ def max_tasks_per_second
+ @options[:max_tasks_per_second]
end
def thread_pool
- @thread_pool ||= ThreadPool.new(options[:thread_pool_size])
+ @thread_pool ||= ThreadPool.new(
+ options[:thread_pool_size],
+ @config,
+ {
+ pool_name: 'activity_task_poller',
+ namespace: namespace,
+ task_queue: task_queue
+ }
+ )
+ end
+
+ def heartbeat_thread_pool
+ @heartbeat_thread_pool ||= ScheduledThreadPool.new(
+ options[:thread_pool_size],
+ @config,
+ {
+ pool_name: 'heartbeat',
+ namespace: namespace,
+ task_queue: task_queue
+ }
+ )
end
end
end
diff --git a/lib/temporal/activity/task_processor.rb b/lib/temporal/activity/task_processor.rb
index 173a2bc9..ef20780b 100644
--- a/lib/temporal/activity/task_processor.rb
+++ b/lib/temporal/activity/task_processor.rb
@@ -2,59 +2,72 @@
require 'temporal/error_handler'
require 'temporal/errors'
require 'temporal/activity/context'
-require 'temporal/concerns/payloads'
require 'temporal/connection/retryer'
require 'temporal/connection'
+require 'temporal/metric_keys'
module Temporal
class Activity
class TaskProcessor
- include Concerns::Payloads
-
- def initialize(task, namespace, activity_lookup, middleware_chain, config)
+ def initialize(task, task_queue, namespace, activity_lookup, middleware_chain, config, heartbeat_thread_pool)
@task = task
+ @task_queue = task_queue
@namespace = namespace
- @metadata = Metadata.generate(Metadata::ACTIVITY_TYPE, task, namespace)
+ @metadata = Metadata.generate_activity_metadata(task, namespace, config.converter)
@task_token = task.task_token
@activity_name = task.activity_type.name
@activity_class = activity_lookup.find(activity_name)
@middleware_chain = middleware_chain
@config = config
+ @heartbeat_thread_pool = heartbeat_thread_pool
end
def process
start_time = Time.now
Temporal.logger.debug("Processing Activity task", metadata.to_h)
- Temporal.metrics.timing('activity_task.queue_time', queue_time_ms, activity: activity_name)
+ Temporal.metrics.timing(Temporal::MetricKeys::ACTIVITY_TASK_QUEUE_TIME, queue_time_ms, metric_tags)
- context = Activity::Context.new(connection, metadata)
+ context = Activity::Context.new(connection, metadata, config, heartbeat_thread_pool)
if !activity_class
raise ActivityNotRegistered, 'Activity is not registered with this worker'
end
result = middleware_chain.invoke(metadata) do
- activity_class.execute_in_context(context, from_payloads(task.input))
+ activity_class.execute_in_context(context, config.converter.from_payloads(task.input))
end
# Do not complete asynchronous activities, these should be completed manually
respond_completed(result) unless context.async?
rescue StandardError, ScriptError => error
- Temporal::ErrorHandler.handle(error, metadata: metadata)
+ Temporal::ErrorHandler.handle(error, config, metadata: metadata)
respond_failed(error)
ensure
+ unless context&.heartbeat_check_scheduled.nil?
+ heartbeat_thread_pool.cancel(context.heartbeat_check_scheduled)
+ end
+
time_diff_ms = ((Time.now - start_time) * 1000).round
- Temporal.metrics.timing('activity_task.latency', time_diff_ms, activity: activity_name)
+ Temporal.metrics.timing(Temporal::MetricKeys::ACTIVITY_TASK_LATENCY, time_diff_ms, metric_tags)
Temporal.logger.debug("Activity task processed", metadata.to_h.merge(execution_time: time_diff_ms))
end
+ def metric_tags
+ {
+ activity: activity_name,
+ namespace: namespace,
+ task_queue: task_queue,
+ workflow: metadata.workflow_name
+ }
+ end
+
private
- attr_reader :task, :namespace, :task_token, :activity_name, :activity_class,
- :middleware_chain, :metadata, :config
-
+ attr_reader :task, :task_queue, :namespace, :task_token, :activity_name, :activity_class,
+ :middleware_chain, :metadata, :config, :heartbeat_thread_pool
+
def connection
@connection ||= Temporal::Connection.generate(config.for_connection)
end
@@ -71,12 +84,12 @@ def respond_completed(result)
Temporal.logger.debug("Failed to report activity task completion, retrying", metadata.to_h)
end
Temporal::Connection::Retryer.with_retries(on_retry: log_retry) do
- connection.respond_activity_task_completed(task_token: task_token, result: result)
+ connection.respond_activity_task_completed(namespace: namespace, task_token: task_token, result: result)
end
rescue StandardError => error
Temporal.logger.error("Unable to complete Activity", metadata.to_h.merge(error: error.inspect))
- Temporal::ErrorHandler.handle(error, metadata: metadata)
+ Temporal::ErrorHandler.handle(error, config, metadata: metadata)
end
def respond_failed(error)
@@ -85,12 +98,12 @@ def respond_failed(error)
Temporal.logger.debug("Failed to report activity task failure, retrying", metadata.to_h)
end
Temporal::Connection::Retryer.with_retries(on_retry: log_retry) do
- connection.respond_activity_task_failed(task_token: task_token, exception: error)
+ connection.respond_activity_task_failed(namespace: namespace, task_token: task_token, exception: error)
end
rescue StandardError => error
Temporal.logger.error("Unable to fail Activity task", metadata.to_h.merge(error: error.inspect))
- Temporal::ErrorHandler.handle(error, metadata: metadata)
+ Temporal::ErrorHandler.handle(error, config, metadata: metadata)
end
end
end
diff --git a/lib/temporal/callable.rb b/lib/temporal/callable.rb
new file mode 100644
index 00000000..3bec64fd
--- /dev/null
+++ b/lib/temporal/callable.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Temporal
+ class Callable
+ def initialize(method:)
+ @method = method
+ end
+
+ def call(input)
+ if input.is_a?(Array) && input.last.instance_of?(Hash)
+ *args, kwargs = input
+
+ @method.call(*args, **kwargs)
+ else
+ @method.call(*input)
+ end
+ end
+ end
+end
diff --git a/lib/temporal/capabilities.rb b/lib/temporal/capabilities.rb
new file mode 100644
index 00000000..644aac31
--- /dev/null
+++ b/lib/temporal/capabilities.rb
@@ -0,0 +1,30 @@
+require 'temporal/errors'
+
+module Temporal
+ class Capabilities
+ def initialize(config)
+ @config = config
+ @sdk_metadata = nil
+ end
+
+ def sdk_metadata
+ set_capabilities if @sdk_metadata.nil?
+
+ @sdk_metadata
+ end
+
+ private
+
+ def set_capabilities
+ connection = Temporal::Connection.generate(@config.for_connection)
+ system_info = connection.get_system_info
+
+ @sdk_metadata = system_info&.capabilities&.sdk_metadata || false
+
+ Temporal.logger.debug(
+ "Connected to Temporal server running version #{system_info.server_version}. " \
+ "SDK Metadata supported: #{@sdk_metadata}"
+ )
+ end
+ end
+end
diff --git a/lib/temporal/client.rb b/lib/temporal/client.rb
index d2380855..c2e83e1f 100644
--- a/lib/temporal/client.rb
+++ b/lib/temporal/client.rb
@@ -1,44 +1,120 @@
+require 'json'
require 'temporal/execution_options'
require 'temporal/connection'
require 'temporal/activity'
require 'temporal/activity/async_token'
require 'temporal/workflow'
+require 'temporal/workflow/context_helpers'
require 'temporal/workflow/history'
+require 'temporal/workflow/history/serialization'
require 'temporal/workflow/execution_info'
+require 'temporal/workflow/executions'
+require 'temporal/workflow/status'
require 'temporal/reset_strategy'
module Temporal
class Client
def initialize(config)
@config = config
+ @converter = config.converter
end
- def start_workflow(workflow, *input, **args)
- options = args.delete(:options) || {}
+ # Start a workflow with an optional signal
+ #
+ # If options[:signal_name] is specified, Temporal will atomically start a new workflow and
+ # signal it or signal a running workflow (matching a specified options[:workflow_id])
+ #
+ # @param workflow [Temporal::Workflow, String] workflow class or name. When a workflow class
+ # is passed, its config (namespace, task_queue, timeouts, etc) will be used
+ # @param input [any] arguments to be passed to workflow's #execute method
+ # @param args [Hash] keyword arguments to be passed to workflow's #execute method
+ # @param options [Hash, nil] optional overrides
+ # @option options [String] :workflow_id
+ # @option options [Symbol] :workflow_id_reuse_policy check Temporal::Connection::GRPC::WORKFLOW_ID_REUSE_POLICY
+ # @option options [String] :name workflow name
+ # @option options [String] :namespace
+ # @option options [String] :task_queue
+ # @option options [String] :signal_name corresponds to the 'signal' argument to signal_workflow. Required if
+ # options[:signal_input] is specified.
+ # @option options [String, Array, nil] :signal_input corresponds to the 'input' argument to signal_workflow
+ # @option options [Hash] :retry_policy check Temporal::RetryPolicy for available options
+ # @option options [Hash] :timeouts check Temporal::Configuration::DEFAULT_TIMEOUTS
+ # @option options [Hash] :headers
+ # @option options [Hash] :search_attributes
+ # @option options [Integer] :start_delay determines the amount of seconds to wait before initiating a Workflow
+ #
+ # @return [String] workflow's run ID
+ def start_workflow(workflow, *input, options: {}, **args)
input << args unless args.empty?
+ signal_name = options.delete(:signal_name)
+ signal_input = options.delete(:signal_input)
+
execution_options = ExecutionOptions.new(workflow, options, config.default_execution_options)
workflow_id = options[:workflow_id] || SecureRandom.uuid
- response = connection.start_workflow_execution(
- namespace: execution_options.namespace,
- workflow_id: workflow_id,
- workflow_name: execution_options.name,
- task_queue: execution_options.task_queue,
- input: input,
- execution_timeout: execution_options.timeouts[:execution],
- # If unspecified, individual runs should have the full time for the execution (which includes retries).
- run_timeout: execution_options.timeouts[:run] || execution_options.timeouts[:execution],
- task_timeout: execution_options.timeouts[:task],
- workflow_id_reuse_policy: options[:workflow_id_reuse_policy],
- headers: execution_options.headers
- )
+ if signal_name.nil? && signal_input.nil?
+ response = connection.start_workflow_execution(
+ namespace: execution_options.namespace,
+ workflow_id: workflow_id,
+ workflow_name: execution_options.name,
+ task_queue: execution_options.task_queue,
+ input: input,
+ execution_timeout: execution_options.timeouts[:execution],
+ # If unspecified, individual runs should have the full time for the execution (which includes retries).
+ run_timeout: compute_run_timeout(execution_options),
+ task_timeout: execution_options.timeouts[:task],
+ workflow_id_reuse_policy: options[:workflow_id_reuse_policy],
+ headers: config.header_propagator_chain.inject(execution_options.headers),
+ memo: execution_options.memo,
+ search_attributes: Workflow::Context::Helpers.process_search_attributes(execution_options.search_attributes),
+ start_delay: execution_options.start_delay
+ )
+ else
+ raise ArgumentError, 'If signal_input is provided, you must also provide signal_name' if signal_name.nil?
+
+ response = connection.signal_with_start_workflow_execution(
+ namespace: execution_options.namespace,
+ workflow_id: workflow_id,
+ workflow_name: execution_options.name,
+ task_queue: execution_options.task_queue,
+ input: input,
+ execution_timeout: execution_options.timeouts[:execution],
+ run_timeout: compute_run_timeout(execution_options),
+ task_timeout: execution_options.timeouts[:task],
+ workflow_id_reuse_policy: options[:workflow_id_reuse_policy],
+ headers: config.header_propagator_chain.inject(execution_options.headers),
+ memo: execution_options.memo,
+ search_attributes: Workflow::Context::Helpers.process_search_attributes(execution_options.search_attributes),
+ signal_name: signal_name,
+ signal_input: signal_input,
+ start_delay: execution_options.start_delay
+ )
+ end
response.run_id
end
- def schedule_workflow(workflow, cron_schedule, *input, **args)
- options = args.delete(:options) || {}
+ # Schedule a workflow for a periodic cron-like execution
+ #
+ # @param workflow [Temporal::Workflow, String] workflow class or name. When a workflow class
+ # is passed, its config (namespace, task_queue, timeouts, etc) will be used
+ # @param cron_schedule [String] a cron-style schedule string
+ # @param input [any] arguments to be passed to workflow's #execute method
+ # @param args [Hash] keyword arguments to be passed to workflow's #execute method
+ # @param options [Hash, nil] optional overrides
+ # @option options [String] :workflow_id
+ # @option options [Symbol] :workflow_id_reuse_policy check Temporal::Connection::GRPC::WORKFLOW_ID_REUSE_POLICY
+ # @option options [String] :name workflow name
+ # @option options [String] :namespace
+ # @option options [String] :task_queue
+ # @option options [Hash] :retry_policy check Temporal::RetryPolicy for available options
+ # @option options [Hash] :timeouts check Temporal::Configuration::DEFAULT_TIMEOUTS
+ # @option options [Hash] :headers
+ # @option options [Hash] :search_attributes
+ #
+ # @return [String] workflow's run ID
+ def schedule_workflow(workflow, cron_schedule, *input, options: {}, **args)
input << args unless args.empty?
execution_options = ExecutionOptions.new(workflow, options, config.default_execution_options)
@@ -54,20 +130,53 @@ def schedule_workflow(workflow, cron_schedule, *input, **args)
# Execution timeout is across all scheduled jobs, whereas run is for an individual run.
# This default is here for backward compatibility. Certainly, the run timeout shouldn't be higher
# than the execution timeout.
- run_timeout: execution_options.timeouts[:run] || execution_options.timeouts[:execution],
+ run_timeout: compute_run_timeout(execution_options),
task_timeout: execution_options.timeouts[:task],
workflow_id_reuse_policy: options[:workflow_id_reuse_policy],
- headers: execution_options.headers,
- cron_schedule: cron_schedule
+ headers: config.header_propagator_chain.inject(execution_options.headers),
+ cron_schedule: cron_schedule,
+ memo: execution_options.memo,
+ search_attributes: Workflow::Context::Helpers.process_search_attributes(execution_options.search_attributes),
)
response.run_id
end
- def register_namespace(name, description = nil)
- connection.register_namespace(name: name, description: description)
+ # Register a new Temporal namespace
+ #
+ # @param name [String] name of the new namespace
+ # @param description [String] optional namespace description
+ # @param is_global [Boolean] used to distinguish local namespaces from global namespaces (https://docs.temporal.io/docs/server/namespaces/#global-namespaces)
+ # @param retention_period [Integer] optional value which specifies how long Temporal will keep workflows after completing
+ # @param data [Hash] optional key-value map for any customized purpose that can be retrieved with describe_namespace
+ def register_namespace(name, description = nil, is_global: false, retention_period: 10, data: nil)
+ connection.register_namespace(name: name, description: description, is_global: is_global, retention_period: retention_period, data: data)
+ end
+
+ # Fetches metadata for a namespace.
+ # @param name [String] name of the namespace
+ # @return [Hash] info deserialized from Temporalio::Api::WorkflowService::V1::DescribeNamespaceResponse
+ def describe_namespace(name)
+ connection.describe_namespace(name: name)
end
+ # Fetches all the namespaces.
+ #
+ # @param page_size [Integer] number of namespace results to return per page.
+ # @param next_page_token [String] an optional pagination token returned by a previous list_namespaces call
+ def list_namespaces(page_size:, next_page_token: "")
+ connection.list_namespaces(page_size: page_size, next_page_token: next_page_token)
+ end
+
+ # Send a signal to a running workflow
+ #
+ # @param workflow [Temporal::Workflow, nil] workflow class or nil
+ # @param signal [String] name of the signal to send
+ # @param workflow_id [String]
+ # @param run_id [String]
+ # @param input [String, Array, nil] optional arguments for the signal
+ # @param namespace [String, nil] if nil, choose the one declared on the workflow class or the
+ # global default
def signal_workflow(workflow, signal, workflow_id, run_id, input = nil, namespace: nil)
execution_options = ExecutionOptions.new(workflow, {}, config.default_execution_options)
@@ -80,15 +189,45 @@ def signal_workflow(workflow, signal, workflow_id, run_id, input = nil, namespac
)
end
- # Long polls for a workflow to be completed and returns whatever the execute function
- # returned. This function times out after 30 seconds and throws Temporal::TimeoutError,
- # not to be confused with Temporal::WorkflowTimedOut which reports that the workflow
- # itself timed out.
- # run_id of nil: await the entire workflow completion. This can span multiple runs
- # in the case where the workflow uses continue-as-new.
- # timeout: seconds to wait for the result. This cannot be longer than 30 seconds because
- # that is the maximum the server supports.
- # namespace: if nil, choose the one declared on the Workflow, or the global default
+ # Issue a query against a running workflow
+ #
+ # @param workflow [Temporal::Workflow, nil] workflow class or nil
+ # @param query [String] name of the query to issue
+ # @param workflow_id [String]
+ # @param run_id [String]
+ # @param args [String, Array, nil] optional arguments for the query
+ # @param namespace [String, nil] if nil, choose the one declared on the workflow class or the
+ # global default
+ # @param query_reject_condition [Symbol] check Temporal::Connection::GRPC::QUERY_REJECT_CONDITION
+ def query_workflow(workflow, query, workflow_id, run_id, *args, namespace: nil, query_reject_condition: nil)
+ execution_options = ExecutionOptions.new(workflow, {}, config.default_execution_options)
+
+ connection.query_workflow(
+ namespace: namespace || execution_options.namespace,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ query: query,
+ args: args,
+ query_reject_condition: query_reject_condition
+ )
+ end
+
+ # Long polls for a workflow to be completed and returns workflow's return value.
+ #
+ # @note This function times out after 30 seconds and throws Temporal::TimeoutError,
+ # not to be confused with `Temporal::WorkflowTimedOut` which reports that the workflow
+ # itself timed out.
+ #
+ # @param workflow [Temporal::Workflow, nil] workflow class or nil
+ # @param workflow_id [String]
+ # @param run_id [String, nil] awaits the entire workflow completion when nil. This can span
+ # multiple runs in the case where the workflow uses continue-as-new.
+ # @param timeout [Integer, nil] seconds to wait for the result. This cannot be longer than 30
+ # seconds because that is the maximum the server supports.
+ # @param namespace [String, nil] if nil, choose the one declared on the workflow class or the
+ # global default
+ #
+ # @return workflow's return value
def await_workflow_result(workflow, workflow_id:, run_id: nil, timeout: nil, namespace: nil)
options = namespace ? {namespace: namespace} : {}
execution_options = ExecutionOptions.new(workflow, options, config.default_execution_options)
@@ -104,7 +243,7 @@ def await_workflow_result(workflow, workflow_id:, run_id: nil, timeout: nil, nam
timeout: timeout || max_timeout,
)
rescue GRPC::DeadlineExceeded => e
- message = if timeout
+ message = if timeout
"Timed out after your specified limit of timeout: #{timeout} seconds"
else
"Timed out after #{max_timeout} seconds, which is the maximum supported amount."
@@ -116,7 +255,7 @@ def await_workflow_result(workflow, workflow_id:, run_id: nil, timeout: nil, nam
case closed_event.type
when 'WORKFLOW_EXECUTION_COMPLETED'
payloads = closed_event.attributes.result
- return ResultConverter.from_result_payloads(payloads)
+ return converter.from_result_payloads(payloads)
when 'WORKFLOW_EXECUTION_TIMED_OUT'
raise Temporal::WorkflowTimedOut
when 'WORKFLOW_EXECUTION_TERMINATED'
@@ -124,7 +263,7 @@ def await_workflow_result(workflow, workflow_id:, run_id: nil, timeout: nil, nam
when 'WORKFLOW_EXECUTION_CANCELED'
raise Temporal::WorkflowCanceled
when 'WORKFLOW_EXECUTION_FAILED'
- raise Temporal::Workflow::Errors.generate_error(closed_event.attributes.failure)
+ raise Temporal::Workflow::Errors.generate_error(closed_event.attributes.failure, converter)
when 'WORKFLOW_EXECUTION_CONTINUED_AS_NEW'
new_run_id = closed_event.attributes.new_execution_run_id
# Throw to let the caller know they're not getting the result
@@ -135,7 +274,26 @@ def await_workflow_result(workflow, workflow_id:, run_id: nil, timeout: nil, nam
end
end
- def reset_workflow(namespace, workflow_id, run_id, strategy: nil, workflow_task_id: nil, reason: 'manual reset')
+ # Reset a workflow
+ #
+ # @note More on resetting a workflow here —
+ # https://docs.temporal.io/tctl-v1/workflow#reset
+ #
+ # @param namespace [String]
+ # @param workflow_id [String]
+ # @param run_id [String]
+ # @param strategy [Symbol, nil] one of the Temporal::ResetStrategy values or `nil` when
+ # passing a workflow_task_id
+ # @param workflow_task_id [Integer, nil] A specific event ID to reset to. The event has to
+ # be of a type WorkflowTaskCompleted, WorkflowTaskFailed or WorkflowTaskTimedOut
+ # @param reason [String] a reset reason to be recorded in workflow's history for reference
+ # @param request_id [String, nil] an idempotency key for the Reset request or `nil` to use
+ # an auto-generated, unique value
+ # @param reset_reapply_type [Symbol] one of the Temporal::ResetReapplyType values. Defaults
+ # to SIGNAL.
+ #
+ # @return [String] run_id of the new workflow execution
+ def reset_workflow(namespace, workflow_id, run_id, strategy: nil, workflow_task_id: nil, reason: 'manual reset', request_id: nil, reset_reapply_type: Temporal::ResetReapplyType::SIGNAL)
# Pick default strategy for backwards-compatibility
strategy ||= :last_workflow_task unless workflow_task_id
@@ -146,19 +304,37 @@ def reset_workflow(namespace, workflow_id, run_id, strategy: nil, workflow_task_
workflow_task_id ||= find_workflow_task(namespace, workflow_id, run_id, strategy)&.id
raise Error, 'Could not find an event to reset to' unless workflow_task_id
+ if request_id.nil?
+ # Generate a request ID if one is not provided.
+ # This is consistent with the Go SDK:
+ # https://github.com/temporalio/sdk-go/blob/e1d76b7c798828302980d483f0981128c97a20c2/internal/internal_workflow_client.go#L952-L972
+
+ request_id = SecureRandom.uuid
+ end
+
response = connection.reset_workflow_execution(
namespace: namespace,
workflow_id: workflow_id,
run_id: run_id,
reason: reason,
- workflow_task_event_id: workflow_task_id
+ workflow_task_event_id: workflow_task_id,
+ request_id: request_id,
+ reset_reapply_type: reset_reapply_type
)
response.run_id
end
+ # Terminate a running workflow
+ #
+ # @param workflow_id [String]
+ # @param namespace [String, nil] use a default namespace when `nil`
+ # @param run_id [String, nil]
+ # @param reason [String, nil] a termination reason to be recorded in workflow's history
+ # for reference
+ # @param details [String, Array, nil] optional details to be stored in history
def terminate_workflow(workflow_id, namespace: nil, run_id: nil, reason: nil, details: nil)
- namespace ||= Temporal.configuration.namespace
+ namespace ||= config.namespace
connection.terminate_workflow_execution(
namespace: namespace,
@@ -169,6 +345,13 @@ def terminate_workflow(workflow_id, namespace: nil, run_id: nil, reason: nil, de
)
end
+ # Fetch workflow's execution info
+ #
+ # @param namespace [String]
+ # @param workflow_id [String]
+ # @param run_id [String]
+ #
+ # @return [Temporal::Workflow::ExecutionInfo] an object containing workflow status and other info
def fetch_workflow_execution_info(namespace, workflow_id, run_id)
response = connection.describe_workflow_execution(
namespace: namespace,
@@ -176,9 +359,14 @@ def fetch_workflow_execution_info(namespace, workflow_id, run_id)
run_id: run_id
)
- Workflow::ExecutionInfo.generate_from(response.workflow_execution_info)
+ Workflow::ExecutionInfo.generate_from(response.workflow_execution_info, converter)
end
+ # Manually complete an activity
+ #
+ # @param async_token [String] an encoded Temporal::Activity::AsyncToken
+ # @param result [String, Array, nil] activity's return value to be stored in history and
+ # passed back to a workflow
def complete_activity(async_token, result = nil)
details = Activity::AsyncToken.decode(async_token)
@@ -191,6 +379,11 @@ def complete_activity(async_token, result = nil)
)
end
+ # Manually fail an activity
+ #
+ # @param async_token [String] an encoded Temporal::Activity::AsyncToken
+ # @param exception [Exception] activity's failure exception to be stored in history and
+ # raised in a workflow
def fail_activity(async_token, exception)
details = Activity::AsyncToken.decode(async_token)
@@ -203,29 +396,220 @@ def fail_activity(async_token, exception)
)
end
- def get_workflow_history(namespace:, workflow_id:, run_id:)
+ # Fetch workflow's execution history
+ #
+ # @param namespace [String]
+ # @param workflow_id [String]
+ # @param run_id [String]
+ #
+ # @return [Temporal::Workflow::History] workflow's execution history
+ def get_workflow_history(namespace: nil, workflow_id:, run_id:)
+ next_page_token = nil
+ events = []
+ loop do
+ response =
+ connection.get_workflow_execution_history(
+ namespace: namespace || config.default_execution_options.namespace,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ next_page_token: next_page_token,
+ )
+ events.concat(response.history.events.to_a)
+ next_page_token = response.next_page_token
+
+ break if next_page_token.empty?
+ end
+
+ Workflow::History.new(events)
+ end
+
+ # Fetch workflow's execution history as JSON. This output can be used for replay testing.
+ #
+ # @param namespace [String]
+ # @param workflow_id [String]
+ # @param run_id [String] optional
+ # @param pretty_print [Boolean] optional
+ #
+ # @return a JSON string representation of the history
+ def get_workflow_history_json(namespace: nil, workflow_id:, run_id: nil, pretty_print: true)
history_response = connection.get_workflow_execution_history(
- namespace: namespace,
+ namespace: namespace || config.default_execution_options.namespace,
workflow_id: workflow_id,
run_id: run_id
)
+ Temporal::Workflow::History::Serialization.to_json(history_response.history)
+ end
- Workflow::History.new(history_response.history.events)
+ # Fetch workflow's execution history as protobuf binary. This output can be used for replay testing.
+ #
+ # @param namespace [String]
+ # @param workflow_id [String]
+ # @param run_id [String] optional
+ #
+ # @return a binary string representation of the history
+ def get_workflow_history_protobuf(namespace: nil, workflow_id:, run_id: nil)
+ history_response = connection.get_workflow_execution_history(
+ namespace: namespace || config.default_execution_options.namespace,
+ workflow_id: workflow_id,
+ run_id: run_id
+ )
+
+ # Protobuf for Ruby unfortunately does not support textproto. Plain binary provides
+ # a less debuggable, but compact option.
+ Temporal::Workflow::History::Serialization.to_protobuf(history_response.history)
end
- class ResultConverter
- extend Concerns::Payloads
+ def list_open_workflow_executions(namespace, from, to = Time.now, filter: {}, next_page_token: nil, max_page_size: nil)
+ validate_filter(filter, :workflow, :workflow_id)
+
+ Temporal::Workflow::Executions.new(converter, connection: connection, status: :open, request_options: { namespace: namespace, from: from, to: to, next_page_token: next_page_token, max_page_size: max_page_size}.merge(filter))
end
- private_constant :ResultConverter
- private
+ def list_closed_workflow_executions(namespace, from, to = Time.now, filter: {}, next_page_token: nil, max_page_size: nil)
+ validate_filter(filter, :status, :workflow, :workflow_id)
+
+ Temporal::Workflow::Executions.new(converter, connection: connection, status: :closed, request_options: { namespace: namespace, from: from, to: to, next_page_token: next_page_token, max_page_size: max_page_size}.merge(filter))
+ end
+
+ def query_workflow_executions(namespace, query, filter: {}, next_page_token: nil, max_page_size: nil)
+ validate_filter(filter, :status, :workflow, :workflow_id)
+
+ Temporal::Workflow::Executions.new(converter, connection: connection, status: :all, request_options: { namespace: namespace, query: query, next_page_token: next_page_token, max_page_size: max_page_size }.merge(filter))
+ end
+
+ # Count the number of workflows matching the provided query
+ #
+ # @param namespace [String]
+ # @param query [String]
+ #
+ # @return [Integer] an integer count of workflows matching the query
+ def count_workflow_executions(namespace, query: nil)
+ response = connection.count_workflow_executions(namespace: namespace, query: query)
+ response.count
+ end
- attr_reader :config
+ # @param attributes [Hash[String, Symbol]] name to symbol for type, see INDEXED_VALUE_TYPE above
+ # @param namespace String, required for SQL enhanced visibility, ignored for elastic search
+ def add_custom_search_attributes(attributes, namespace: nil)
+ connection.add_custom_search_attributes(attributes, namespace || config.default_execution_options.namespace)
+ end
+
+ # @param namespace String, required for SQL enhanced visibility, ignored for elastic search
+ # @return Hash[String, Symbol] name to symbol for type, see INDEXED_VALUE_TYPE above
+ def list_custom_search_attributes(namespace: nil)
+ connection.list_custom_search_attributes(namespace || config.default_execution_options.namespace)
+ end
+
+ # @param attribute_names [Array[String]] Attributes to remove
+ # @param namespace String, required for SQL enhanced visibility, ignored for elastic search
+ def remove_custom_search_attributes(*attribute_names, namespace: nil)
+ connection.remove_custom_search_attributes(attribute_names, namespace || config.default_execution_options.namespace)
+ end
+
+ # List all schedules in a namespace
+ #
+ # @param namespace [String] namespace to list schedules in
+ # @param maximum_page_size [Integer] number of namespace results to return per page.
+ # @param next_page_token [String] an optional pagination token returned by a previous list_schedules call
+ def list_schedules(namespace, maximum_page_size:, next_page_token: '')
+ connection.list_schedules(namespace: namespace, maximum_page_size: maximum_page_size, next_page_token: next_page_token)
+ end
+
+ # Describe a schedule in a namespace
+ #
+ # @param namespace [String] namespace the schedule belongs to
+ # @param schedule_id [String] schedule id
+ def describe_schedule(namespace, schedule_id)
+ connection.describe_schedule(namespace: namespace, schedule_id: schedule_id)
+ end
+
+ # Create a new schedule
+ #
+ #
+ # @param namespace [String] namespace to create schedule in
+ # @param schedule_id [String] schedule id
+ # @param schedule [Temporal::Schedule::Schedule] schedule to create
+ # @param trigger_immediately [Boolean] If set, trigger one action to run immediately
+ # @param backfill [Temporal::Schedule::Backfill] If set, run through the backfill schedule and trigger actions.
+ # @param memo [Hash] optional key-value memo map to attach to the schedule
+ # @param search_attributes [Hash] optional key-value search attributes to attach to the schedule
+ def create_schedule(
+ namespace,
+ schedule_id,
+ schedule,
+ trigger_immediately: false,
+ backfill: nil,
+ memo: nil,
+ search_attributes: nil
+ )
+ connection.create_schedule(
+ namespace: namespace,
+ schedule_id: schedule_id,
+ schedule: schedule,
+ trigger_immediately: trigger_immediately,
+ backfill: backfill,
+ memo: memo,
+ search_attributes: search_attributes
+ )
+ end
+
+ # Delete a schedule in a namespace
+ #
+ # @param namespace [String] namespace the schedule belongs to
+ # @param schedule_id [String] schedule id
+ def delete_schedule(namespace, schedule_id)
+ connection.delete_schedule(namespace: namespace, schedule_id: schedule_id)
+ end
+
+ # Update a schedule in a namespace
+ #
+ # @param namespace [String] namespace the schedule belongs to
+ # @param schedule_id [String] schedule id
+ # @param schedule [Temporal::Schedule::Schedule] schedule to update. All fields in the schedule will be replaced completely by this updated schedule.
+ # @param conflict_token [String] a token that was returned by a previous describe_schedule call. If provided and does not match the current schedule's token, the update will fail.
+ def update_schedule(namespace, schedule_id, schedule, conflict_token: nil)
+ connection.update_schedule(namespace: namespace, schedule_id: schedule_id, schedule: schedule, conflict_token: conflict_token)
+ end
+
+ # Trigger one action of a schedule to run immediately
+ #
+ # @param namespace [String] namespace
+ # @param schedule_id [String] schedule id
+ # @param overlap_policy [Symbol] Should be one of :skip, :buffer_one, :buffer_all, :cancel_other, :terminate_other, :allow_all
+ def trigger_schedule(namespace, schedule_id, overlap_policy: nil)
+ connection.trigger_schedule(namespace: namespace, schedule_id: schedule_id, overlap_policy: overlap_policy)
+ end
+
+ # Pause a schedule so actions will not run
+ #
+ # @param namespace [String] namespace
+ # @param schedule_id [String] schedule id
+ # @param note [String] an optional note to explain why the schedule was paused
+ def pause_schedule(namespace, schedule_id, note: nil)
+ connection.pause_schedule(namespace: namespace, schedule_id: schedule_id, should_pause: true, note: note)
+ end
+
+ # Unpause a schedule so actions will run
+ #
+ # @param namespace [String] namespace
+ # @param schedule_id [String] schedule id
+ # @param note [String] an optional note to explain why the schedule was unpaused
+ def unpause_schedule(namespace, schedule_id, note: nil)
+ connection.pause_schedule(namespace: namespace, schedule_id: schedule_id, should_pause: false, note: note)
+ end
def connection
@connection ||= Temporal::Connection.generate(config.for_connection)
end
+ private
+
+ attr_reader :config, :converter
+
+ def compute_run_timeout(execution_options)
+ execution_options.timeouts[:run] || execution_options.timeouts[:execution]
+ end
+
def find_workflow_task(namespace, workflow_id, run_id, strategy)
history = get_workflow_history(
namespace: namespace,
@@ -252,5 +636,13 @@ def find_workflow_task(namespace, workflow_id, run_id, strategy)
raise ArgumentError, 'Unsupported reset strategy'
end
end
+ def validate_filter(filter, *allowed_filters)
+ if (filter.keys - allowed_filters).length > 0
+ raise ArgumentError, "Allowed filters are: #{allowed_filters}"
+ end
+
+ raise ArgumentError, 'Only one filter is allowed' if filter.size > 1
+ end
+
end
end
diff --git a/lib/temporal/concerns/payloads.rb b/lib/temporal/concerns/payloads.rb
deleted file mode 100644
index 49af8f3f..00000000
--- a/lib/temporal/concerns/payloads.rb
+++ /dev/null
@@ -1,51 +0,0 @@
-module Temporal
- module Concerns
- module Payloads
- def from_payloads(payloads)
- payload_converter.from_payloads(payloads)
- end
-
- def from_payload(payload)
- payload_converter.from_payload(payload)
- end
-
- def from_result_payloads(payloads)
- from_payloads(payloads)&.first
- end
-
- def from_details_payloads(payloads)
- from_payloads(payloads)&.first
- end
-
- def from_signal_payloads(payloads)
- from_payloads(payloads)&.first
- end
-
- def to_payloads(data)
- payload_converter.to_payloads(data)
- end
-
- def to_payload(data)
- payload_converter.to_payload(data)
- end
-
- def to_result_payloads(data)
- to_payloads([data])
- end
-
- def to_details_payloads(data)
- to_payloads([data])
- end
-
- def to_signal_payloads(data)
- to_payloads([data])
- end
-
- private
-
- def payload_converter
- Temporal.configuration.converter
- end
- end
- end
-end
diff --git a/lib/temporal/concerns/typed.rb b/lib/temporal/concerns/typed.rb
index 2a05f144..0b8c6702 100644
--- a/lib/temporal/concerns/typed.rb
+++ b/lib/temporal/concerns/typed.rb
@@ -32,7 +32,7 @@ def input(klass = nil, &block)
private
def generate_struct
- Class.new(Dry::Struct::Value) { transform_keys(&:to_sym) }
+ Class.new(Dry::Struct) { transform_keys(&:to_sym) }
end
end
end
diff --git a/lib/temporal/configuration.rb b/lib/temporal/configuration.rb
index 828c561f..0506b61f 100644
--- a/lib/temporal/configuration.rb
+++ b/lib/temporal/configuration.rb
@@ -1,19 +1,27 @@
+require 'temporal/capabilities'
+require 'temporal/converter_wrapper'
require 'temporal/logger'
require 'temporal/metrics_adapters/null'
+require 'temporal/middleware/header_propagator_chain'
+require 'temporal/middleware/entry'
require 'temporal/connection/converter/payload/nil'
require 'temporal/connection/converter/payload/bytes'
require 'temporal/connection/converter/payload/json'
+require 'temporal/connection/converter/payload/proto_json'
require 'temporal/connection/converter/composite'
+require 'temporal/connection/converter/codec/chain'
module Temporal
class Configuration
- Connection = Struct.new(:type, :host, :port, keyword_init: true)
- Execution = Struct.new(:namespace, :task_queue, :timeouts, :headers, keyword_init: true)
+ Connection = Struct.new(:type, :host, :port, :credentials, :identity, :converter, :connection_options, keyword_init: true)
+ Execution = Struct.new(:namespace, :task_queue, :timeouts, :headers, :search_attributes, keyword_init: true)
- attr_reader :timeouts, :error_handlers
- attr_writer :converter
- attr_accessor :connection_type, :host, :port, :logger, :metrics_adapter, :namespace, :task_queue, :headers
+ attr_reader :timeouts, :error_handlers, :capabilities, :payload_codec
+ attr_accessor :connection_type, :use_error_serialization_v2, :host, :port, :credentials, :identity,
+ :logger, :metrics_adapter, :namespace, :task_queue, :headers, :search_attributes, :header_propagators,
+ :legacy_signals, :no_signals_in_first_task, :connection_options, :log_on_workflow_replay
+ # See https://docs.temporal.io/blog/activity-timeouts/ for general docs.
# We want an infinite execution timeout for cron schedules and other perpetual workflows.
# We choose an 10-year execution timeout because that's the maximum the cassandra DB supports,
# matching the go SDK, see https://github.com/temporalio/sdk-go/blob/d96130dad3d2bc189bc7626543bd5911cc07ff6d/internal/internal_workflow_testsuite.go#L68
@@ -21,11 +29,23 @@ class Configuration
execution: 86_400 * 365 * 10, # End-to-end workflow time, including all recurrences if it's scheduled.
# Time for a single run, excluding retries. Server defaults to execution timeout; we default here as well to be explicit.
run: 86_400 * 365 * 10,
- task: 10, # Workflow task processing time
+ # Workflow task processing time. Workflows should not use the network and should execute very quickly.
+ task: 10,
schedule_to_close: nil, # End-to-end activity time (default: schedule_to_start + start_to_close)
- schedule_to_start: 10, # Queue time for an activity
+ # Max queue time for an activity. Default: none. This is dangerous; most teams don't use.
+ # See # https://docs.temporal.io/blog/activity-timeouts/#schedule-to-start-timeout
+ schedule_to_start: nil,
start_to_close: 30, # Time spent processing an activity
- heartbeat: nil # Max time between heartbeats (off by default)
+ heartbeat: nil, # Max time between heartbeats (off by default)
+ # If a heartbeat timeout is specified, 80% of that value will be used for throttling. If not specified, this
+ # value will be used. This default comes from the Go SDK.
+ # https://github.com/temporalio/sdk-go/blob/eaa3802876de77500164f80f378559c51d6bb0e2/internal/internal_task_handlers.go#L65
+ default_heartbeat_throttle_interval: 30,
+ # Heartbeat throttling interval will always be capped by this value. This default comes from the Go SDK.
+ # https://github.com/temporalio/sdk-go/blob/eaa3802876de77500164f80f378559c51d6bb0e2/internal/internal_task_handlers.go#L66
+ #
+ # To disable heartbeat throttling, set this timeout to 0.
+ max_heartbeat_throttle_interval: 60
}.freeze
DEFAULT_HEADERS = {}.freeze
@@ -35,10 +55,19 @@ class Configuration
payload_converters: [
Temporal::Connection::Converter::Payload::Nil.new,
Temporal::Connection::Converter::Payload::Bytes.new,
- Temporal::Connection::Converter::Payload::JSON.new,
+ Temporal::Connection::Converter::Payload::ProtoJSON.new,
+ Temporal::Connection::Converter::Payload::JSON.new
]
).freeze
+ # The Payload Codec is an optional step that happens between the wire and the Payload Converter:
+ # Temporal Server <--> Wire <--> Payload Codec <--> Payload Converter <--> User code
+ # which can be useful for transformations such as compression and encryption
+ # more info at https://docs.temporal.io/security#payload-codec
+ DEFAULT_PAYLOAD_CODEC = Temporal::Connection::Converter::Codec::Chain.new(
+ payload_codecs: []
+ ).freeze
+
def initialize
@connection_type = :grpc
@logger = Temporal::Logger.new(STDOUT, progname: 'temporal_client')
@@ -48,7 +77,29 @@ def initialize
@task_queue = DEFAULT_TASK_QUEUE
@headers = DEFAULT_HEADERS
@converter = DEFAULT_CONVERTER
+ @payload_codec = DEFAULT_PAYLOAD_CODEC
+ @use_error_serialization_v2 = false
@error_handlers = []
+ @credentials = :this_channel_is_insecure
+ @identity = nil
+ @search_attributes = {}
+ @header_propagators = []
+ @capabilities = Capabilities.new(self)
+ @connection_options = {}
+ # Setting this to true can be useful when debugging workflow code or running replay tests
+ @log_on_workflow_replay = false
+
+ # Signals previously were incorrectly replayed in order within a workflow task window, rather
+ # than at the beginning. Correcting this changes the determinism of any workflow with signals.
+ # This flag exists to force this legacy behavior to gradually roll out the new ordering.
+ # Because this feature depends on the SDK Metadata capability which only became available
+ # in Temporal server 1.20, it is ignored when connected to older versions and effectively
+ # treated as true.
+ @legacy_signals = false
+
+ # This is a legacy behavior that is incorrect, but which existing workflow code may rely on. Only
+ # set to true until you can fix your workflow code.
+ @no_signals_in_first_task = false
end
def on_error(&block)
@@ -67,15 +118,15 @@ def timeouts=(new_timeouts)
@timeouts = DEFAULT_TIMEOUTS.merge(new_timeouts)
end
- def converter
- @converter
- end
-
def for_connection
Connection.new(
type: connection_type,
host: host,
- port: port
+ port: port,
+ credentials: credentials,
+ identity: identity || default_identity,
+ converter: converter,
+ connection_options: connection_options.merge(use_error_serialization_v2: use_error_serialization_v2)
).freeze
end
@@ -84,8 +135,42 @@ def default_execution_options
namespace: namespace,
task_queue: task_list,
timeouts: timeouts,
- headers: headers
+ headers: headers,
+ search_attributes: search_attributes
).freeze
end
+
+ def add_header_propagator(propagator_class, *args)
+ raise 'header propagator must implement `def inject!(headers)`' unless propagator_class.method_defined? :inject!
+
+ @header_propagators << Middleware::Entry.new(propagator_class, args)
+ end
+
+ def header_propagator_chain
+ Middleware::HeaderPropagatorChain.new(header_propagators)
+ end
+
+ def converter
+ @converter_wrapper ||= ConverterWrapper.new(@converter, @payload_codec)
+ end
+
+ def converter=(new_converter)
+ @converter = new_converter
+ @converter_wrapper = nil
+ end
+
+ def payload_codec=(new_codec)
+ @payload_codec = new_codec
+ @converter_wrapper = nil
+ end
+
+ private
+
+ def default_identity
+ hostname = `hostname`
+ pid = Process.pid
+
+ "#{pid}@#{hostname}".freeze
+ end
end
end
diff --git a/lib/temporal/connection.rb b/lib/temporal/connection.rb
index b499ca73..6ee1bcc7 100644
--- a/lib/temporal/connection.rb
+++ b/lib/temporal/connection.rb
@@ -10,12 +10,12 @@ def self.generate(configuration)
connection_class = CLIENT_TYPES_MAP[configuration.type]
host = configuration.host
port = configuration.port
+ credentials = configuration.credentials
+ identity = configuration.identity
+ converter = configuration.converter
+ options = configuration.connection_options
- hostname = `hostname`
- thread_id = Thread.current.object_id
- identity = "#{thread_id}@#{hostname}"
-
- connection_class.new(host, port, identity)
+ connection_class.new(host, port, identity, credentials, converter, options)
end
end
end
diff --git a/lib/temporal/connection/converter/base.rb b/lib/temporal/connection/converter/base.rb
index 93b09b2b..17669983 100644
--- a/lib/temporal/connection/converter/base.rb
+++ b/lib/temporal/connection/converter/base.rb
@@ -17,7 +17,7 @@ def from_payload(payload)
def to_payloads(data)
return nil if data.nil?
- Temporal::Api::Common::V1::Payloads.new(
+ Temporalio::Api::Common::V1::Payloads.new(
payloads: data.map(&method(:to_payload))
)
end
diff --git a/lib/temporal/connection/converter/codec/base.rb b/lib/temporal/connection/converter/codec/base.rb
new file mode 100644
index 00000000..d8748909
--- /dev/null
+++ b/lib/temporal/connection/converter/codec/base.rb
@@ -0,0 +1,35 @@
+module Temporal
+ module Connection
+ module Converter
+ module Codec
+ class Base
+ def encodes(payloads)
+ return nil if payloads.nil?
+
+ Temporalio::Api::Common::V1::Payloads.new(
+ payloads: payloads.payloads.map(&method(:encode))
+ )
+ end
+
+ def decodes(payloads)
+ return nil if payloads.nil?
+
+ Temporalio::Api::Common::V1::Payloads.new(
+ payloads: payloads.payloads.map(&method(:decode))
+ )
+ end
+
+ def encode(payload)
+ # should return Temporalio::Api::Common::V1::Payload
+ raise NotImplementedError, 'codec converter needs to implement encode'
+ end
+
+ def decode(payload)
+ # should return Temporalio::Api::Common::V1::Payload
+ raise NotImplementedError, 'codec converter needs to implement decode'
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/connection/converter/codec/chain.rb b/lib/temporal/connection/converter/codec/chain.rb
new file mode 100644
index 00000000..fc1a16f8
--- /dev/null
+++ b/lib/temporal/connection/converter/codec/chain.rb
@@ -0,0 +1,36 @@
+require 'temporal/connection/converter/codec/base'
+
+module Temporal
+ module Connection
+ module Converter
+ module Codec
+ # Performs encoding/decoding on the payloads via the given payload codecs. When encoding
+ # the codecs are applied last to first meaning the earlier encodings wrap the later ones.
+ # When decoding, the codecs are applied first to last to reverse the effect.
+ class Chain < Base
+ def initialize(payload_codecs:)
+ @payload_codecs = payload_codecs
+ end
+
+ def encode(payload)
+ payload_codecs.reverse_each do |payload_codec|
+ payload = payload_codec.encode(payload)
+ end
+ payload
+ end
+
+ def decode(payload)
+ payload_codecs.each do |payload_codec|
+ payload = payload_codec.decode(payload)
+ end
+ payload
+ end
+
+ private
+
+ attr_reader :payload_codecs
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/connection/converter/composite.rb b/lib/temporal/connection/converter/composite.rb
index 2b0d2f11..640ae4f7 100644
--- a/lib/temporal/connection/converter/composite.rb
+++ b/lib/temporal/connection/converter/composite.rb
@@ -25,7 +25,7 @@ def from_payload(payload)
converter = payload_converters_by_encoding[encoding]
if converter.nil?
- raise ConverterNotFound
+ raise ConverterNotFound, "Could not find PayloadConverter for #{encoding}"
end
converter.from_payload(payload)
diff --git a/lib/temporal/connection/converter/payload/bytes.rb b/lib/temporal/connection/converter/payload/bytes.rb
index 16b157c8..2b8da7e6 100644
--- a/lib/temporal/connection/converter/payload/bytes.rb
+++ b/lib/temporal/connection/converter/payload/bytes.rb
@@ -18,7 +18,7 @@ def from_payload(payload)
def to_payload(data)
return nil unless data.is_a?(String) && data.encoding == Encoding::ASCII_8BIT
- Temporal::Api::Common::V1::Payload.new(
+ Temporalio::Api::Common::V1::Payload.new(
metadata: { 'encoding' => ENCODING },
data: data
)
diff --git a/lib/temporal/connection/converter/payload/json.rb b/lib/temporal/connection/converter/payload/json.rb
index 0e1665e0..1cd7b4d1 100644
--- a/lib/temporal/connection/converter/payload/json.rb
+++ b/lib/temporal/connection/converter/payload/json.rb
@@ -16,7 +16,7 @@ def from_payload(payload)
end
def to_payload(data)
- Temporal::Api::Common::V1::Payload.new(
+ Temporalio::Api::Common::V1::Payload.new(
metadata: { 'encoding' => ENCODING },
data: Temporal::JSON.serialize(data).b
)
diff --git a/lib/temporal/connection/converter/payload/nil.rb b/lib/temporal/connection/converter/payload/nil.rb
index 7337520f..856aa012 100644
--- a/lib/temporal/connection/converter/payload/nil.rb
+++ b/lib/temporal/connection/converter/payload/nil.rb
@@ -16,7 +16,7 @@ def from_payload(payload)
def to_payload(data)
return nil unless data.nil?
- Temporal::Api::Common::V1::Payload.new(
+ Temporalio::Api::Common::V1::Payload.new(
metadata: { 'encoding' => ENCODING }
)
end
diff --git a/lib/temporal/connection/converter/payload/proto_json.rb b/lib/temporal/connection/converter/payload/proto_json.rb
new file mode 100644
index 00000000..cdbc36f9
--- /dev/null
+++ b/lib/temporal/connection/converter/payload/proto_json.rb
@@ -0,0 +1,35 @@
+require 'temporal/json'
+
+module Temporal
+ module Connection
+ module Converter
+ module Payload
+ class ProtoJSON
+ ENCODING = 'json/protobuf'.freeze
+
+ def encoding
+ ENCODING
+ end
+
+ def from_payload(payload)
+ # TODO: Add error handling.
+ message_type = payload.metadata['messageType']
+ descriptor = Google::Protobuf::DescriptorPool.generated_pool.lookup(message_type)
+ descriptor.msgclass.decode_json(payload.data)
+ end
+
+ def to_payload(data)
+ return unless data.is_a?(Google::Protobuf::MessageExts)
+ Temporalio::Api::Common::V1::Payload.new(
+ metadata: {
+ 'encoding' => ENCODING,
+ 'messageType' => data.class.descriptor.name,
+ },
+ data: data.to_json.b,
+ )
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/connection/grpc.rb b/lib/temporal/connection/grpc.rb
index 848d2de0..f35d2dc3 100644
--- a/lib/temporal/connection/grpc.rb
+++ b/lib/temporal/connection/grpc.rb
@@ -1,44 +1,80 @@
require 'grpc'
+require 'time'
require 'google/protobuf/well_known_types'
require 'securerandom'
+require 'json'
+require 'gen/temporal/api/filter/v1/message_pb'
+require 'gen/temporal/api/workflowservice/v1/service_services_pb'
+require 'gen/temporal/api/operatorservice/v1/service_services_pb'
+require 'gen/temporal/api/enums/v1/workflow_pb'
+require 'gen/temporal/api/enums/v1/common_pb'
require 'temporal/connection/errors'
+require 'temporal/connection/interceptors/client_name_version_interceptor'
require 'temporal/connection/serializer'
require 'temporal/connection/serializer/failure'
-require 'gen/temporal/api/workflowservice/v1/service_services_pb'
-require 'temporal/concerns/payloads'
+require 'temporal/connection/serializer/backfill'
+require 'temporal/connection/serializer/schedule'
+require 'temporal/connection/serializer/workflow_id_reuse_policy'
module Temporal
module Connection
class GRPC
- include Concerns::Payloads
+ HISTORY_EVENT_FILTER = {
+ all: Temporalio::Api::Enums::V1::HistoryEventFilterType::HISTORY_EVENT_FILTER_TYPE_ALL_EVENT,
+ close: Temporalio::Api::Enums::V1::HistoryEventFilterType::HISTORY_EVENT_FILTER_TYPE_CLOSE_EVENT
+ }.freeze
- WORKFLOW_ID_REUSE_POLICY = {
- allow_failed: Temporal::Api::Enums::V1::WorkflowIdReusePolicy::WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE_FAILED_ONLY,
- allow: Temporal::Api::Enums::V1::WorkflowIdReusePolicy::WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE,
- reject: Temporal::Api::Enums::V1::WorkflowIdReusePolicy::WORKFLOW_ID_REUSE_POLICY_REJECT_DUPLICATE
+ QUERY_REJECT_CONDITION = {
+ none: Temporalio::Api::Enums::V1::QueryRejectCondition::QUERY_REJECT_CONDITION_NONE,
+ not_open: Temporalio::Api::Enums::V1::QueryRejectCondition::QUERY_REJECT_CONDITION_NOT_OPEN,
+ not_completed_cleanly: Temporalio::Api::Enums::V1::QueryRejectCondition::QUERY_REJECT_CONDITION_NOT_COMPLETED_CLEANLY
}.freeze
- HISTORY_EVENT_FILTER = {
- all: Temporal::Api::Enums::V1::HistoryEventFilterType::HISTORY_EVENT_FILTER_TYPE_ALL_EVENT,
- close: Temporal::Api::Enums::V1::HistoryEventFilterType::HISTORY_EVENT_FILTER_TYPE_CLOSE_EVENT,
+ SYMBOL_TO_INDEXED_VALUE_TYPE = {
+ text: Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_TEXT,
+ keyword: Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_KEYWORD,
+ int: Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_INT,
+ double: Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_DOUBLE,
+ bool: Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_BOOL,
+ datetime: Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_DATETIME,
+ keyword_list: Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_KEYWORD_LIST
}.freeze
- def initialize(host, port, identity)
+ INDEXED_VALUE_TYPE_TO_SYMBOL = SYMBOL_TO_INDEXED_VALUE_TYPE.map do |symbol, int_value|
+ [Temporalio::Api::Enums::V1::IndexedValueType.lookup(int_value), symbol]
+ end.to_h.freeze
+
+ SYMBOL_TO_RESET_REAPPLY_TYPE = {
+ signal: Temporalio::Api::Enums::V1::ResetReapplyType::RESET_REAPPLY_TYPE_SIGNAL,
+ none: Temporalio::Api::Enums::V1::ResetReapplyType::RESET_REAPPLY_TYPE_NONE
+ }
+
+ DEFAULT_OPTIONS = {
+ max_page_size: 100
+ }.freeze
+
+ CONNECTION_TIMEOUT_SECONDS = 60
+
+ def initialize(host, port, identity, credentials, converter, options = {})
@url = "#{host}:#{port}"
@identity = identity
+ @credentials = credentials
+ @converter = converter
@poll = true
@poll_mutex = Mutex.new
@poll_request = nil
+ @options = DEFAULT_OPTIONS.merge(options)
end
- def register_namespace(name:, description: nil, global: false, retention_period: 10)
- request = Temporal::Api::WorkflowService::V1::RegisterNamespaceRequest.new(
+ def register_namespace(name:, description: nil, is_global: false, retention_period: 10, data: nil)
+ request = Temporalio::Api::WorkflowService::V1::RegisterNamespaceRequest.new(
namespace: name,
description: description,
- is_global_namespace: global,
+ is_global_namespace: is_global,
workflow_execution_retention_period: Google::Protobuf::Duration.new(
- seconds: retention_period * 24 * 60 * 60
- )
+ seconds: (retention_period * 24 * 60 * 60).to_i
+ ),
+ data: data
)
client.register_namespace(request)
rescue ::GRPC::AlreadyExists => e
@@ -46,19 +82,20 @@ def register_namespace(name:, description: nil, global: false, retention_period:
end
def describe_namespace(name:)
- request = Temporal::Api::WorkflowService::V1::DescribeNamespaceRequest.new(namespace: name)
+ request = Temporalio::Api::WorkflowService::V1::DescribeNamespaceRequest.new(namespace: name)
client.describe_namespace(request)
end
- def list_namespaces(page_size:)
- request = Temporal::Api::WorkflowService::V1::ListNamespacesRequest.new(pageSize: page_size)
+ def list_namespaces(page_size:, next_page_token: '')
+ request = Temporalio::Api::WorkflowService::V1::ListNamespacesRequest.new(page_size: page_size,
+ next_page_token: next_page_token)
client.list_namespaces(request)
end
def update_namespace(name:, description:)
- request = Temporal::Api::WorkflowService::V1::UpdateNamespaceRequest.new(
+ request = Temporalio::Api::WorkflowService::V1::UpdateNamespaceRequest.new(
namespace: name,
- update_info: Temporal::Api::WorkflowService::V1::UpdateNamespaceInfo.new(
+ update_info: Temporalio::Api::WorkflowService::V1::UpdateNamespaceInfo.new(
description: description
)
)
@@ -66,7 +103,7 @@ def update_namespace(name:, description:)
end
def deprecate_namespace(name:)
- request = Temporal::Api::WorkflowService::V1::DeprecateNamespaceRequest.new(namespace: name)
+ request = Temporalio::Api::WorkflowService::V1::DeprecateNamespaceRequest.new(namespace: name)
client.deprecate_namespace(request)
end
@@ -75,42 +112,46 @@ def start_workflow_execution(
workflow_id:,
workflow_name:,
task_queue:,
- input: nil,
execution_timeout:,
run_timeout:,
task_timeout:,
+ input: nil,
workflow_id_reuse_policy: nil,
headers: nil,
- cron_schedule: nil
+ cron_schedule: nil,
+ memo: nil,
+ search_attributes: nil,
+ start_delay: nil
)
- request = Temporal::Api::WorkflowService::V1::StartWorkflowExecutionRequest.new(
+ request = Temporalio::Api::WorkflowService::V1::StartWorkflowExecutionRequest.new(
identity: identity,
namespace: namespace,
- workflow_type: Temporal::Api::Common::V1::WorkflowType.new(
+ workflow_type: Temporalio::Api::Common::V1::WorkflowType.new(
name: workflow_name
),
workflow_id: workflow_id,
- task_queue: Temporal::Api::TaskQueue::V1::TaskQueue.new(
+ workflow_id_reuse_policy: Temporal::Connection::Serializer::WorkflowIdReusePolicy.new(workflow_id_reuse_policy, converter).to_proto,
+ task_queue: Temporalio::Api::TaskQueue::V1::TaskQueue.new(
name: task_queue
),
- input: to_payloads(input),
+ input: converter.to_payloads(input),
workflow_execution_timeout: execution_timeout,
workflow_run_timeout: run_timeout,
workflow_task_timeout: task_timeout,
+ workflow_start_delay: start_delay,
request_id: SecureRandom.uuid,
- header: Temporal::Api::Common::V1::Header.new(
- fields: headers
+ header: Temporalio::Api::Common::V1::Header.new(
+ fields: converter.to_payload_map(headers || {})
+ ),
+ cron_schedule: cron_schedule,
+ memo: Temporalio::Api::Common::V1::Memo.new(
+ fields: converter.to_payload_map(memo || {})
),
- cron_schedule: cron_schedule
+ search_attributes: Temporalio::Api::Common::V1::SearchAttributes.new(
+ indexed_fields: converter.to_payload_map_without_codec(search_attributes || {})
+ )
)
- if workflow_id_reuse_policy
- policy = WORKFLOW_ID_REUSE_POLICY[workflow_id_reuse_policy]
- raise Client::ArgumentError, 'Unknown workflow_id_reuse_policy specified' unless policy
-
- request.workflow_id_reuse_policy = policy
- end
-
client.start_workflow_execution(request)
rescue ::GRPC::AlreadyExists => e
# Feel like there should be cleaner way to do this...
@@ -129,19 +170,18 @@ def get_workflow_execution_history(
event_type: :all,
timeout: nil
)
- if wait_for_new_event
+ if wait_for_new_event
if timeout.nil?
# This is an internal error. Wrappers should enforce this.
- raise "You must specify a timeout when wait_for_new_event = true."
+ raise 'You must specify a timeout when wait_for_new_event = true.'
elsif timeout > SERVER_MAX_GET_WORKFLOW_EXECUTION_HISTORY_POLL
- raise ClientError.new(
- "You may not specify a timeout of more than #{SERVER_MAX_GET_WORKFLOW_EXECUTION_HISTORY_POLL} seconds, got: #{timeout}."
- )
+ raise ClientError,
+ "You may not specify a timeout of more than #{SERVER_MAX_GET_WORKFLOW_EXECUTION_HISTORY_POLL} seconds, got: #{timeout}."
end
end
- request = Temporal::Api::WorkflowService::V1::GetWorkflowExecutionHistoryRequest.new(
+ request = Temporalio::Api::WorkflowService::V1::GetWorkflowExecutionHistoryRequest.new(
namespace: namespace,
- execution: Temporal::Api::Common::V1::WorkflowExecution.new(
+ execution: Temporalio::Api::Common::V1::WorkflowExecution.new(
workflow_id: workflow_id,
run_id: run_id
),
@@ -153,63 +193,98 @@ def get_workflow_execution_history(
client.get_workflow_execution_history(request, deadline: deadline)
end
- def poll_workflow_task_queue(namespace:, task_queue:)
- request = Temporal::Api::WorkflowService::V1::PollWorkflowTaskQueueRequest.new(
+ def poll_workflow_task_queue(namespace:, task_queue:, binary_checksum:)
+ request = Temporalio::Api::WorkflowService::V1::PollWorkflowTaskQueueRequest.new(
identity: identity,
namespace: namespace,
- task_queue: Temporal::Api::TaskQueue::V1::TaskQueue.new(
+ task_queue: Temporalio::Api::TaskQueue::V1::TaskQueue.new(
name: task_queue
- )
+ ),
+ binary_checksum: binary_checksum
)
poll_mutex.synchronize do
return unless can_poll?
+
@poll_request = client.poll_workflow_task_queue(request, return_op: true)
end
poll_request.execute
end
- def respond_workflow_task_completed(task_token:, commands:)
- request = Temporal::Api::WorkflowService::V1::RespondWorkflowTaskCompletedRequest.new(
+ def respond_query_task_completed(namespace:, task_token:, query_result:)
+ query_result_proto = Serializer.serialize(query_result, converter)
+ request = Temporalio::Api::WorkflowService::V1::RespondQueryTaskCompletedRequest.new(
+ task_token: task_token,
+ namespace: namespace,
+ completed_type: query_result_proto.result_type,
+ query_result: query_result_proto.answer,
+ error_message: query_result_proto.error_message
+ )
+
+ client.respond_query_task_completed(request)
+ end
+
+ def respond_workflow_task_completed(namespace:, task_token:, commands:, binary_checksum:, new_sdk_flags_used:, query_results: {})
+ request = Temporalio::Api::WorkflowService::V1::RespondWorkflowTaskCompletedRequest.new(
+ namespace: namespace,
identity: identity,
task_token: task_token,
- commands: Array(commands).map { |(_, command)| Serializer.serialize(command) }
+ commands: Array(commands).map { |(_, command)| Serializer.serialize(command, converter) },
+ query_results: query_results.transform_values { |value| Serializer.serialize(value, converter) },
+ binary_checksum: binary_checksum,
+ sdk_metadata: if new_sdk_flags_used.any?
+ Temporalio::Api::Sdk::V1::WorkflowTaskCompletedMetadata.new(
+ lang_used_flags: new_sdk_flags_used.to_a
+ )
+ # else nil
+ end
)
+
client.respond_workflow_task_completed(request)
end
- def respond_workflow_task_failed(task_token:, cause:, exception: nil)
- request = Temporal::Api::WorkflowService::V1::RespondWorkflowTaskFailedRequest.new(
+ def respond_workflow_task_failed(namespace:, task_token:, cause:, exception:, binary_checksum:)
+ request = Temporalio::Api::WorkflowService::V1::RespondWorkflowTaskFailedRequest.new(
+ namespace: namespace,
identity: identity,
task_token: task_token,
cause: cause,
- failure: Serializer::Failure.new(exception).to_proto
+ failure: Serializer::Failure.new(exception, converter).to_proto,
+ binary_checksum: binary_checksum
)
client.respond_workflow_task_failed(request)
end
- def poll_activity_task_queue(namespace:, task_queue:)
- request = Temporal::Api::WorkflowService::V1::PollActivityTaskQueueRequest.new(
+ def poll_activity_task_queue(namespace:, task_queue:, max_tasks_per_second: 0)
+ request = Temporalio::Api::WorkflowService::V1::PollActivityTaskQueueRequest.new(
identity: identity,
namespace: namespace,
- task_queue: Temporal::Api::TaskQueue::V1::TaskQueue.new(
+ task_queue: Temporalio::Api::TaskQueue::V1::TaskQueue.new(
name: task_queue
)
)
+ if max_tasks_per_second > 0
+ request.task_queue_metadata = Temporalio::Api::TaskQueue::V1::TaskQueueMetadata.new(
+ max_tasks_per_second: Google::Protobuf::DoubleValue.new(value: max_tasks_per_second)
+ )
+ end
+
poll_mutex.synchronize do
return unless can_poll?
+
@poll_request = client.poll_activity_task_queue(request, return_op: true)
end
poll_request.execute
end
- def record_activity_task_heartbeat(task_token:, details: nil)
- request = Temporal::Api::WorkflowService::V1::RecordActivityTaskHeartbeatRequest.new(
+ def record_activity_task_heartbeat(namespace:, task_token:, details: nil)
+ request = Temporalio::Api::WorkflowService::V1::RecordActivityTaskHeartbeatRequest.new(
+ namespace: namespace,
task_token: task_token,
- details: to_details_payloads(details),
+ details: converter.to_details_payloads(details),
identity: identity
)
client.record_activity_task_heartbeat(request)
@@ -219,52 +294,56 @@ def record_activity_task_heartbeat_by_id
raise NotImplementedError
end
- def respond_activity_task_completed(task_token:, result:)
- request = Temporal::Api::WorkflowService::V1::RespondActivityTaskCompletedRequest.new(
+ def respond_activity_task_completed(namespace:, task_token:, result:)
+ request = Temporalio::Api::WorkflowService::V1::RespondActivityTaskCompletedRequest.new(
+ namespace: namespace,
identity: identity,
task_token: task_token,
- result: to_result_payloads(result),
+ result: converter.to_result_payloads(result)
)
client.respond_activity_task_completed(request)
end
def respond_activity_task_completed_by_id(namespace:, activity_id:, workflow_id:, run_id:, result:)
- request = Temporal::Api::WorkflowService::V1::RespondActivityTaskCompletedByIdRequest.new(
+ request = Temporalio::Api::WorkflowService::V1::RespondActivityTaskCompletedByIdRequest.new(
identity: identity,
namespace: namespace,
workflow_id: workflow_id,
run_id: run_id,
activity_id: activity_id,
- result: to_result_payloads(result)
+ result: converter.to_result_payloads(result)
)
client.respond_activity_task_completed_by_id(request)
end
- def respond_activity_task_failed(task_token:, exception:)
- request = Temporal::Api::WorkflowService::V1::RespondActivityTaskFailedRequest.new(
+ def respond_activity_task_failed(namespace:, task_token:, exception:)
+ serialize_whole_error = options.fetch(:use_error_serialization_v2)
+ request = Temporalio::Api::WorkflowService::V1::RespondActivityTaskFailedRequest.new(
+ namespace: namespace,
identity: identity,
task_token: task_token,
- failure: Serializer::Failure.new(exception).to_proto
+ failure: Serializer::Failure.new(exception, converter, serialize_whole_error: serialize_whole_error).to_proto
)
client.respond_activity_task_failed(request)
end
def respond_activity_task_failed_by_id(namespace:, activity_id:, workflow_id:, run_id:, exception:)
- request = Temporal::Api::WorkflowService::V1::RespondActivityTaskFailedByIdRequest.new(
+ request = Temporalio::Api::WorkflowService::V1::RespondActivityTaskFailedByIdRequest.new(
identity: identity,
namespace: namespace,
workflow_id: workflow_id,
run_id: run_id,
activity_id: activity_id,
- failure: Serializer::Failure.new(exception).to_proto
+ failure: Serializer::Failure.new(exception, converter).to_proto
)
client.respond_activity_task_failed_by_id(request)
end
- def respond_activity_task_canceled(task_token:, details: nil)
- request = Temporal::Api::WorkflowService::V1::RespondActivityTaskCanceledRequest.new(
+ def respond_activity_task_canceled(namespace:, task_token:, details: nil)
+ request = Temporalio::Api::WorkflowService::V1::RespondActivityTaskCanceledRequest.new(
+ namespace: namespace,
task_token: task_token,
- details: to_details_payloads(details),
+ details: converter.to_details_payloads(details),
identity: identity
)
client.respond_activity_task_canceled(request)
@@ -279,33 +358,95 @@ def request_cancel_workflow_execution
end
def signal_workflow_execution(namespace:, workflow_id:, run_id:, signal:, input: nil)
- request = Temporal::Api::WorkflowService::V1::SignalWorkflowExecutionRequest.new(
+ request = Temporalio::Api::WorkflowService::V1::SignalWorkflowExecutionRequest.new(
namespace: namespace,
- workflow_execution: Temporal::Api::Common::V1::WorkflowExecution.new(
+ workflow_execution: Temporalio::Api::Common::V1::WorkflowExecution.new(
workflow_id: workflow_id,
run_id: run_id
),
signal_name: signal,
- input: to_signal_payloads(input),
+ input: converter.to_signal_payloads(input),
identity: identity
)
client.signal_workflow_execution(request)
end
- def signal_with_start_workflow_execution
- raise NotImplementedError
+ def signal_with_start_workflow_execution(
+ namespace:,
+ workflow_id:,
+ workflow_name:,
+ task_queue:,
+ execution_timeout:, run_timeout:, task_timeout:, signal_name:, signal_input:, input: nil,
+ workflow_id_reuse_policy: nil,
+ headers: nil,
+ cron_schedule: nil,
+ memo: nil,
+ search_attributes: nil,
+ start_delay: nil
+ )
+ proto_header_fields = if headers.nil?
+ converter.to_payload_map({})
+ elsif headers.instance_of?(Hash)
+ converter.to_payload_map(headers)
+ else
+ # Preserve backward compatability for headers specified using proto objects
+ warn '[DEPRECATION] Specify headers using a hash rather than protobuf objects'
+ headers
+ end
+
+ request = Temporalio::Api::WorkflowService::V1::SignalWithStartWorkflowExecutionRequest.new(
+ identity: identity,
+ namespace: namespace,
+ workflow_type: Temporalio::Api::Common::V1::WorkflowType.new(
+ name: workflow_name
+ ),
+ workflow_id: workflow_id,
+ workflow_id_reuse_policy: Temporal::Connection::Serializer::WorkflowIdReusePolicy.new(workflow_id_reuse_policy, converter).to_proto,
+ task_queue: Temporalio::Api::TaskQueue::V1::TaskQueue.new(
+ name: task_queue
+ ),
+ input: converter.to_payloads(input),
+ workflow_execution_timeout: execution_timeout,
+ workflow_run_timeout: run_timeout,
+ workflow_task_timeout: task_timeout,
+ workflow_start_delay: start_delay,
+ request_id: SecureRandom.uuid,
+ header: Temporalio::Api::Common::V1::Header.new(
+ fields: proto_header_fields
+ ),
+ cron_schedule: cron_schedule,
+ signal_name: signal_name,
+ signal_input: converter.to_signal_payloads(signal_input),
+ memo: Temporalio::Api::Common::V1::Memo.new(
+ fields: converter.to_payload_map(memo || {})
+ ),
+ search_attributes: Temporalio::Api::Common::V1::SearchAttributes.new(
+ indexed_fields: converter.to_payload_map_without_codec(search_attributes || {})
+ )
+ )
+
+ client.signal_with_start_workflow_execution(request)
end
- def reset_workflow_execution(namespace:, workflow_id:, run_id:, reason:, workflow_task_event_id:)
- request = Temporal::Api::WorkflowService::V1::ResetWorkflowExecutionRequest.new(
+ def reset_workflow_execution(namespace:, workflow_id:, run_id:, reason:, workflow_task_event_id:, request_id:, reset_reapply_type: Temporal::ResetReapplyType::SIGNAL)
+ request = Temporalio::Api::WorkflowService::V1::ResetWorkflowExecutionRequest.new(
namespace: namespace,
- workflow_execution: Temporal::Api::Common::V1::WorkflowExecution.new(
+ workflow_execution: Temporalio::Api::Common::V1::WorkflowExecution.new(
workflow_id: workflow_id,
- run_id: run_id,
+ run_id: run_id
),
reason: reason,
- workflow_task_finish_event_id: workflow_task_event_id
+ workflow_task_finish_event_id: workflow_task_event_id,
+ request_id: request_id
)
+
+ if reset_reapply_type
+ reapply_type = SYMBOL_TO_RESET_REAPPLY_TYPE[reset_reapply_type]
+ raise Client::ArgumentError, 'Unknown reset_reapply_type specified' unless reapply_type
+
+ request.reset_reapply_type = reapply_type
+ end
+
client.reset_workflow_execution(request)
end
@@ -316,30 +457,53 @@ def terminate_workflow_execution(
reason: nil,
details: nil
)
- request = Temporal::Api::WorkflowService::V1::TerminateWorkflowExecutionRequest.new(
+ request = Temporalio::Api::WorkflowService::V1::TerminateWorkflowExecutionRequest.new(
identity: identity,
namespace: namespace,
- workflow_execution: Temporal::Api::Common::V1::WorkflowExecution.new(
+ workflow_execution: Temporalio::Api::Common::V1::WorkflowExecution.new(
workflow_id: workflow_id,
- run_id: run_id,
+ run_id: run_id
),
reason: reason,
- details: to_details_payloads(details)
+ details: converter.to_details_payloads(details)
)
client.terminate_workflow_execution(request)
end
- def list_open_workflow_executions
- raise NotImplementedError
+ def list_open_workflow_executions(namespace:, from:, to:, next_page_token: nil, workflow_id: nil, workflow: nil, max_page_size: nil)
+ request = Temporalio::Api::WorkflowService::V1::ListOpenWorkflowExecutionsRequest.new(
+ namespace: namespace,
+ maximum_page_size: max_page_size.nil? ? options[:max_page_size] : max_page_size,
+ next_page_token: next_page_token,
+ start_time_filter: serialize_time_filter(from, to),
+ execution_filter: serialize_execution_filter(workflow_id),
+ type_filter: serialize_type_filter(workflow)
+ )
+ client.list_open_workflow_executions(request)
end
- def list_closed_workflow_executions
- raise NotImplementedError
+ def list_closed_workflow_executions(namespace:, from:, to:, next_page_token: nil, workflow_id: nil, workflow: nil, status: nil, max_page_size: nil)
+ request = Temporalio::Api::WorkflowService::V1::ListClosedWorkflowExecutionsRequest.new(
+ namespace: namespace,
+ maximum_page_size: max_page_size.nil? ? options[:max_page_size] : max_page_size,
+ next_page_token: next_page_token,
+ start_time_filter: serialize_time_filter(from, to),
+ execution_filter: serialize_execution_filter(workflow_id),
+ type_filter: serialize_type_filter(workflow),
+ status_filter: serialize_status_filter(status)
+ )
+ client.list_closed_workflow_executions(request)
end
- def list_workflow_executions
- raise NotImplementedError
+ def list_workflow_executions(namespace:, query:, next_page_token: nil, max_page_size: nil)
+ request = Temporalio::Api::WorkflowService::V1::ListWorkflowExecutionsRequest.new(
+ namespace: namespace,
+ page_size: max_page_size.nil? ? options[:max_page_size] : max_page_size,
+ next_page_token: next_page_token,
+ query: query
+ )
+ client.list_workflow_executions(request)
end
def list_archived_workflow_executions
@@ -350,30 +514,101 @@ def scan_workflow_executions
raise NotImplementedError
end
- def count_workflow_executions
- raise NotImplementedError
+ def count_workflow_executions(namespace:, query:)
+ request = Temporalio::Api::WorkflowService::V1::CountWorkflowExecutionsRequest.new(
+ namespace: namespace,
+ query: query
+ )
+ client.count_workflow_executions(request)
end
- def get_search_attributes
- raise NotImplementedError
+ def add_custom_search_attributes(attributes, namespace)
+ attributes.each_value do |symbol_type|
+ next if SYMBOL_TO_INDEXED_VALUE_TYPE.include?(symbol_type)
+
+ raise Temporal::InvalidSearchAttributeTypeFailure,
+ "Cannot add search attributes (#{attributes}): unknown search attribute type :#{symbol_type}, supported types: #{SYMBOL_TO_INDEXED_VALUE_TYPE.keys}"
+ end
+
+ request = Temporalio::Api::OperatorService::V1::AddSearchAttributesRequest.new(
+ search_attributes: attributes.map { |name, type| [name, SYMBOL_TO_INDEXED_VALUE_TYPE[type]] }.to_h,
+ namespace: namespace
+ )
+ begin
+ operator_client.add_search_attributes(request)
+ rescue ::GRPC::AlreadyExists => e
+ raise Temporal::SearchAttributeAlreadyExistsFailure, e
+ rescue ::GRPC::Internal => e
+ # The internal workflow that adds search attributes can fail for a variety of reasons such
+ # as recreating a removed attribute with a new type. Wrap these all up into a fall through
+ # exception.
+ raise Temporal::SearchAttributeFailure, e
+ end
end
- def respond_query_task_completed
- raise NotImplementedError
+ def list_custom_search_attributes(namespace)
+ request = Temporalio::Api::OperatorService::V1::ListSearchAttributesRequest.new(
+ namespace: namespace
+ )
+ response = operator_client.list_search_attributes(request)
+ response.custom_attributes.map { |name, type| [name, INDEXED_VALUE_TYPE_TO_SYMBOL[type]] }.to_h
+ end
+
+ def remove_custom_search_attributes(attribute_names, namespace)
+ request = Temporalio::Api::OperatorService::V1::RemoveSearchAttributesRequest.new(
+ search_attributes: attribute_names,
+ namespace: namespace
+ )
+ begin
+ operator_client.remove_search_attributes(request)
+ rescue ::GRPC::NotFound => e
+ raise Temporal::NotFoundFailure, e
+ end
end
def reset_sticky_task_queue
raise NotImplementedError
end
- def query_workflow
- raise NotImplementedError
+ def query_workflow(namespace:, workflow_id:, run_id:, query:, args: nil, query_reject_condition: nil)
+ request = Temporalio::Api::WorkflowService::V1::QueryWorkflowRequest.new(
+ namespace: namespace,
+ execution: Temporalio::Api::Common::V1::WorkflowExecution.new(
+ workflow_id: workflow_id,
+ run_id: run_id
+ ),
+ query: Temporalio::Api::Query::V1::WorkflowQuery.new(
+ query_type: query,
+ query_args: converter.to_query_payloads(args)
+ )
+ )
+ if query_reject_condition
+ condition = QUERY_REJECT_CONDITION[query_reject_condition]
+ raise Client::ArgumentError, 'Unknown query_reject_condition specified' unless condition
+
+ request.query_reject_condition = condition
+ end
+
+ begin
+ response = client.query_workflow(request)
+ rescue ::GRPC::InvalidArgument => e
+ raise Temporal::QueryFailed, e.details
+ end
+
+ if response.query_rejected
+ rejection_status = response.query_rejected.status || 'not specified by server'
+ raise Temporal::QueryFailed, "Query rejected: status #{rejection_status}"
+ elsif !response.query_result
+ raise Temporal::QueryFailed, 'Invalid response from server'
+ else
+ converter.from_query_payloads(response.query_result)
+ end
end
def describe_workflow_execution(namespace:, workflow_id:, run_id:)
- request = Temporal::Api::WorkflowService::V1::DescribeWorkflowExecutionRequest.new(
+ request = Temporalio::Api::WorkflowService::V1::DescribeWorkflowExecutionRequest.new(
namespace: namespace,
- execution: Temporal::Api::Common::V1::WorkflowExecution.new(
+ execution: Temporalio::Api::Common::V1::WorkflowExecution.new(
workflow_id: workflow_id,
run_id: run_id
)
@@ -382,12 +617,12 @@ def describe_workflow_execution(namespace:, workflow_id:, run_id:)
end
def describe_task_queue(namespace:, task_queue:)
- request = Temporal::Api::WorkflowService::V1::DescribeTaskQueueRequest.new(
+ request = Temporalio::Api::WorkflowService::V1::DescribeTaskQueueRequest.new(
namespace: namespace,
- task_queue: Temporal::Api::TaskQueue::V1::TaskQueue.new(
+ task_queue: Temporalio::Api::TaskQueue::V1::TaskQueue.new(
name: task_queue
),
- task_queue_type: Temporal::Api::Enums::V1::TaskQueueType::Workflow,
+ task_queue_type: Temporalio::Api::Enums::V1::TaskQueueType::TASK_QUEUE_TYPE_WORKFLOW,
include_task_queue_status: true
)
client.describe_task_queue(request)
@@ -400,21 +635,259 @@ def cancel_polling_request
end
end
+ def get_system_info
+ client.get_system_info(Temporalio::Api::WorkflowService::V1::GetSystemInfoRequest.new)
+ end
+
+ def list_schedules(namespace:, maximum_page_size:, next_page_token:)
+ request = Temporalio::Api::WorkflowService::V1::ListSchedulesRequest.new(
+ namespace: namespace,
+ maximum_page_size: maximum_page_size,
+ next_page_token: next_page_token
+ )
+ resp = client.list_schedules(request)
+
+ Temporal::Schedule::ListSchedulesResponse.new(
+ schedules: resp.schedules.map do |schedule|
+ Temporal::Schedule::ScheduleListEntry.new(
+ schedule_id: schedule.schedule_id,
+ memo: converter.from_payload_map(schedule.memo&.fields || {}),
+ search_attributes: converter.from_payload_map_without_codec(schedule.search_attributes&.indexed_fields || {}),
+ info: schedule.info
+ )
+ end,
+ next_page_token: resp.next_page_token,
+ )
+ end
+
+ def describe_schedule(namespace:, schedule_id:)
+ request = Temporalio::Api::WorkflowService::V1::DescribeScheduleRequest.new(
+ namespace: namespace,
+ schedule_id: schedule_id
+ )
+
+ resp = nil
+ begin
+ resp = client.describe_schedule(request)
+ rescue ::GRPC::NotFound => e
+ raise Temporal::NotFoundFailure, e
+ end
+
+ Temporal::Schedule::DescribeScheduleResponse.new(
+ schedule: resp.schedule,
+ info: resp.info,
+ memo: converter.from_payload_map(resp.memo&.fields || {}),
+ search_attributes: converter.from_payload_map_without_codec(resp.search_attributes&.indexed_fields || {}),
+ conflict_token: resp.conflict_token
+ )
+ end
+
+ def create_schedule(
+ namespace:,
+ schedule_id:,
+ schedule:,
+ trigger_immediately: nil,
+ backfill: nil,
+ memo: nil,
+ search_attributes: nil
+ )
+ initial_patch = nil
+ if trigger_immediately || backfill
+ initial_patch = Temporalio::Api::Schedule::V1::SchedulePatch.new
+ if trigger_immediately
+ initial_patch.trigger_immediately = Temporalio::Api::Schedule::V1::TriggerImmediatelyRequest.new(
+ overlap_policy: Temporal::Connection::Serializer::ScheduleOverlapPolicy.new(
+ schedule.policies&.overlap_policy,
+ converter
+ ).to_proto
+ )
+ end
+
+ if backfill
+ initial_patch.backfill_request += [Temporal::Connection::Serializer::Backfill.new(backfill, converter).to_proto]
+ end
+ end
+
+ request = Temporalio::Api::WorkflowService::V1::CreateScheduleRequest.new(
+ namespace: namespace,
+ schedule_id: schedule_id,
+ schedule: Temporal::Connection::Serializer::Schedule.new(schedule, converter).to_proto,
+ identity: identity,
+ request_id: SecureRandom.uuid,
+ memo: Temporalio::Api::Common::V1::Memo.new(
+ fields: converter.to_payload_map(memo || {})
+ ),
+ search_attributes: Temporalio::Api::Common::V1::SearchAttributes.new(
+ indexed_fields: converter.to_payload_map_without_codec(search_attributes || {})
+ )
+ )
+ client.create_schedule(request)
+ end
+
+ def delete_schedule(namespace:, schedule_id:)
+ request = Temporalio::Api::WorkflowService::V1::DeleteScheduleRequest.new(
+ namespace: namespace,
+ schedule_id: schedule_id,
+ identity: identity
+ )
+
+ begin
+ client.delete_schedule(request)
+ rescue ::GRPC::NotFound => e
+ raise Temporal::NotFoundFailure, e
+ end
+ end
+
+ def update_schedule(namespace:, schedule_id:, schedule:, conflict_token: nil)
+ request = Temporalio::Api::WorkflowService::V1::UpdateScheduleRequest.new(
+ namespace: namespace,
+ schedule_id: schedule_id,
+ schedule: Temporal::Connection::Serializer::Schedule.new(schedule, converter).to_proto,
+ conflict_token: conflict_token,
+ identity: identity,
+ request_id: SecureRandom.uuid
+ )
+
+ begin
+ client.update_schedule(request)
+ rescue ::GRPC::NotFound => e
+ raise Temporal::NotFoundFailure, e
+ end
+ end
+
+ def trigger_schedule(namespace:, schedule_id:, overlap_policy: nil)
+ request = Temporalio::Api::WorkflowService::V1::PatchScheduleRequest.new(
+ namespace: namespace,
+ schedule_id: schedule_id,
+ patch: Temporalio::Api::Schedule::V1::SchedulePatch.new(
+ trigger_immediately: Temporalio::Api::Schedule::V1::TriggerImmediatelyRequest.new(
+ overlap_policy: Temporal::Connection::Serializer::ScheduleOverlapPolicy.new(
+ overlap_policy,
+ converter
+ ).to_proto
+ ),
+ ),
+ identity: identity,
+ request_id: SecureRandom.uuid
+ )
+
+ begin
+ client.patch_schedule(request)
+ rescue ::GRPC::NotFound => e
+ raise Temporal::NotFoundFailure, e
+ end
+ end
+
+ def pause_schedule(namespace:, schedule_id:, should_pause:, note: nil)
+ patch = Temporalio::Api::Schedule::V1::SchedulePatch.new
+ if should_pause
+ patch.pause = note || 'Paused by temporal-ruby'
+ else
+ patch.unpause = note || 'Unpaused by temporal-ruby'
+ end
+
+ request = Temporalio::Api::WorkflowService::V1::PatchScheduleRequest.new(
+ namespace: namespace,
+ schedule_id: schedule_id,
+ patch: patch,
+ identity: identity,
+ request_id: SecureRandom.uuid
+ )
+
+ begin
+ client.patch_schedule(request)
+ rescue ::GRPC::NotFound => e
+ raise Temporal::NotFoundFailure, e
+ end
+ end
+
private
- attr_reader :url, :identity, :poll_mutex, :poll_request
+ attr_reader :url, :identity, :credentials, :converter, :options, :poll_mutex, :poll_request
def client
- @client ||= Temporal::Api::WorkflowService::V1::WorkflowService::Stub.new(
+ return @client if @client
+
+ channel_args = {}
+
+ if options[:keepalive_time_ms]
+ channel_args["grpc.keepalive_time_ms"] = options[:keepalive_time_ms]
+ end
+
+ if options[:retry_connection] || options[:retry_policy]
+ channel_args["grpc.enable_retries"] = 1
+
+ retry_policy = options[:retry_policy] || {
+ retryableStatusCodes: ["UNAVAILABLE"],
+ maxAttempts: 3,
+ initialBackoff: "0.1s",
+ backoffMultiplier: 2.0,
+ maxBackoff: "0.3s"
+ }
+
+ channel_args["grpc.service_config"] = ::JSON.generate(
+ methodConfig: [
+ {
+ name: [
+ {
+ service: "temporal.api.workflowservice.v1.WorkflowService",
+ }
+ ],
+ retryPolicy: retry_policy
+ }
+ ]
+ )
+ end
+
+ @client = Temporalio::Api::WorkflowService::V1::WorkflowService::Stub.new(
+ url,
+ credentials,
+ timeout: CONNECTION_TIMEOUT_SECONDS,
+ interceptors: [ClientNameVersionInterceptor.new],
+ channel_args: channel_args
+ )
+ end
+
+ def operator_client
+ @operator_client ||= Temporalio::Api::OperatorService::V1::OperatorService::Stub.new(
url,
- :this_channel_is_insecure,
- timeout: 60
+ credentials,
+ timeout: CONNECTION_TIMEOUT_SECONDS,
+ interceptors: [ClientNameVersionInterceptor.new]
)
end
def can_poll?
@poll
end
+
+ def serialize_time_filter(from, to)
+ Temporalio::Api::Filter::V1::StartTimeFilter.new(
+ earliest_time: from&.to_time,
+ latest_time: to&.to_time
+ )
+ end
+
+ def serialize_execution_filter(value)
+ return unless value
+
+ Temporalio::Api::Filter::V1::WorkflowExecutionFilter.new(workflow_id: value)
+ end
+
+ def serialize_type_filter(value)
+ return unless value
+
+ Temporalio::Api::Filter::V1::WorkflowTypeFilter.new(name: value)
+ end
+
+ def serialize_status_filter(value)
+ return unless value
+
+ sym = Temporal::Workflow::Status::API_STATUS_MAP.invert[value]
+ status = Temporalio::Api::Enums::V1::WorkflowExecutionStatus.resolve(sym)
+
+ Temporalio::Api::Filter::V1::StatusFilter.new(status: status)
+ end
end
end
end
diff --git a/lib/temporal/connection/interceptors/client_name_version_interceptor.rb b/lib/temporal/connection/interceptors/client_name_version_interceptor.rb
new file mode 100644
index 00000000..5d70ff72
--- /dev/null
+++ b/lib/temporal/connection/interceptors/client_name_version_interceptor.rb
@@ -0,0 +1,14 @@
+require 'grpc'
+require 'temporal/version'
+
+module Temporal
+ module Connection
+ class ClientNameVersionInterceptor < GRPC::ClientInterceptor
+ def request_response(request: nil, call: nil, method: nil, metadata: nil)
+ metadata['client-name'] = 'community-ruby'
+ metadata['client-version'] = Temporal::VERSION
+ yield
+ end
+ end
+ end
+end
diff --git a/lib/temporal/connection/retryer.rb b/lib/temporal/connection/retryer.rb
index d70ba3b4..2948f05f 100644
--- a/lib/temporal/connection/retryer.rb
+++ b/lib/temporal/connection/retryer.rb
@@ -1,3 +1,5 @@
+require 'grpc/errors'
+
module Temporal
module Connection
module Retryer
@@ -11,15 +13,15 @@ module Retryer
# No amount of retrying will help in these cases.
def self.do_not_retry_errors
[
- GRPC::AlreadyExists,
- GRPC::Cancelled,
- GRPC::FailedPrecondition,
- GRPC::InvalidArgument,
+ ::GRPC::AlreadyExists,
+ ::GRPC::Cancelled,
+ ::GRPC::FailedPrecondition,
+ ::GRPC::InvalidArgument,
# If the activity has timed out, the server will return this and will never accept a retry
- GRPC::NotFound,
- GRPC::PermissionDenied,
- GRPC::Unauthenticated,
- GRPC::Unimplemented,
+ ::GRPC::NotFound,
+ ::GRPC::PermissionDenied,
+ ::GRPC::Unauthenticated,
+ ::GRPC::Unimplemented,
]
end
diff --git a/lib/temporal/connection/serializer.rb b/lib/temporal/connection/serializer.rb
index 98ce71b4..b31c1005 100644
--- a/lib/temporal/connection/serializer.rb
+++ b/lib/temporal/connection/serializer.rb
@@ -1,4 +1,5 @@
require 'temporal/workflow/command'
+require 'temporal/workflow/query_result'
require 'temporal/connection/serializer/schedule_activity'
require 'temporal/connection/serializer/start_child_workflow'
require 'temporal/connection/serializer/request_activity_cancellation'
@@ -8,6 +9,10 @@
require 'temporal/connection/serializer/complete_workflow'
require 'temporal/connection/serializer/continue_as_new'
require 'temporal/connection/serializer/fail_workflow'
+require 'temporal/connection/serializer/signal_external_workflow'
+require 'temporal/connection/serializer/upsert_search_attributes'
+require 'temporal/connection/serializer/query_answer'
+require 'temporal/connection/serializer/query_failure'
module Temporal
module Connection
@@ -21,12 +26,16 @@ module Serializer
Workflow::Command::CancelTimer => Serializer::CancelTimer,
Workflow::Command::CompleteWorkflow => Serializer::CompleteWorkflow,
Workflow::Command::ContinueAsNew => Serializer::ContinueAsNew,
- Workflow::Command::FailWorkflow => Serializer::FailWorkflow
+ Workflow::Command::FailWorkflow => Serializer::FailWorkflow,
+ Workflow::Command::SignalExternalWorkflow => Serializer::SignalExternalWorkflow,
+ Workflow::Command::UpsertSearchAttributes => Serializer::UpsertSearchAttributes,
+ Workflow::QueryResult::Answer => Serializer::QueryAnswer,
+ Workflow::QueryResult::Failure => Serializer::QueryFailure,
}.freeze
- def self.serialize(object)
+ def self.serialize(object, converter)
serializer = SERIALIZERS_MAP[object.class]
- serializer.new(object).to_proto
+ serializer.new(object, converter).to_proto
end
end
end
diff --git a/lib/temporal/connection/serializer/backfill.rb b/lib/temporal/connection/serializer/backfill.rb
new file mode 100644
index 00000000..7abb40a5
--- /dev/null
+++ b/lib/temporal/connection/serializer/backfill.rb
@@ -0,0 +1,26 @@
+require "temporal/connection/serializer/base"
+require "temporal/connection/serializer/schedule_overlap_policy"
+
+module Temporal
+ module Connection
+ module Serializer
+ class Backfill < Base
+ def to_proto
+ return unless object
+
+ Temporalio::Api::Schedule::V1::BackfillRequest.new(
+ start_time: serialize_time(object.start_time),
+ end_time: serialize_time(object.end_time),
+ overlap_policy: Temporal::Connection::Serializer::ScheduleOverlapPolicy.new(object.overlap_policy, converter).to_proto
+ )
+ end
+
+ def serialize_time(input_time)
+ return unless input_time
+
+ Google::Protobuf::Timestamp.new.from_time(input_time)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/connection/serializer/base.rb b/lib/temporal/connection/serializer/base.rb
index 9fcd49c5..79e8767a 100644
--- a/lib/temporal/connection/serializer/base.rb
+++ b/lib/temporal/connection/serializer/base.rb
@@ -6,8 +6,9 @@ module Temporal
module Connection
module Serializer
class Base
- def initialize(object)
+ def initialize(object, converter)
@object = object
+ @converter = converter
end
def to_proto
@@ -16,7 +17,7 @@ def to_proto
private
- attr_reader :object
+ attr_reader :object, :converter
end
end
end
diff --git a/lib/temporal/connection/serializer/cancel_timer.rb b/lib/temporal/connection/serializer/cancel_timer.rb
index a51eceb7..99215f04 100644
--- a/lib/temporal/connection/serializer/cancel_timer.rb
+++ b/lib/temporal/connection/serializer/cancel_timer.rb
@@ -5,10 +5,10 @@ module Connection
module Serializer
class CancelTimer < Base
def to_proto
- Temporal::Api::Command::V1::Command.new(
- command_type: Temporal::Api::Enums::V1::CommandType::COMMAND_TYPE_CANCEL_TIMER,
+ Temporalio::Api::Command::V1::Command.new(
+ command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_CANCEL_TIMER,
cancel_timer_command_attributes:
- Temporal::Api::Command::V1::CancelTimerCommandAttributes.new(
+ Temporalio::Api::Command::V1::CancelTimerCommandAttributes.new(
timer_id: object.timer_id.to_s
)
)
diff --git a/lib/temporal/connection/serializer/complete_workflow.rb b/lib/temporal/connection/serializer/complete_workflow.rb
index f228dbee..8eaa3ed4 100644
--- a/lib/temporal/connection/serializer/complete_workflow.rb
+++ b/lib/temporal/connection/serializer/complete_workflow.rb
@@ -1,18 +1,15 @@
require 'temporal/connection/serializer/base'
-require 'temporal/concerns/payloads'
module Temporal
module Connection
module Serializer
class CompleteWorkflow < Base
- include Concerns::Payloads
-
def to_proto
- Temporal::Api::Command::V1::Command.new(
- command_type: Temporal::Api::Enums::V1::CommandType::COMMAND_TYPE_COMPLETE_WORKFLOW_EXECUTION,
+ Temporalio::Api::Command::V1::Command.new(
+ command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_COMPLETE_WORKFLOW_EXECUTION,
complete_workflow_execution_command_attributes:
- Temporal::Api::Command::V1::CompleteWorkflowExecutionCommandAttributes.new(
- result: to_result_payloads(object.result)
+ Temporalio::Api::Command::V1::CompleteWorkflowExecutionCommandAttributes.new(
+ result: converter.to_result_payloads(object.result)
)
)
end
diff --git a/lib/temporal/connection/serializer/continue_as_new.rb b/lib/temporal/connection/serializer/continue_as_new.rb
index 2d1e588c..989ff2a9 100644
--- a/lib/temporal/connection/serializer/continue_as_new.rb
+++ b/lib/temporal/connection/serializer/continue_as_new.rb
@@ -1,25 +1,24 @@
require 'temporal/connection/serializer/base'
require 'temporal/connection/serializer/retry_policy'
-require 'temporal/concerns/payloads'
module Temporal
module Connection
module Serializer
class ContinueAsNew < Base
- include Concerns::Payloads
-
def to_proto
- Temporal::Api::Command::V1::Command.new(
- command_type: Temporal::Api::Enums::V1::CommandType::COMMAND_TYPE_CONTINUE_AS_NEW_WORKFLOW_EXECUTION,
+ Temporalio::Api::Command::V1::Command.new(
+ command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_CONTINUE_AS_NEW_WORKFLOW_EXECUTION,
continue_as_new_workflow_execution_command_attributes:
- Temporal::Api::Command::V1::ContinueAsNewWorkflowExecutionCommandAttributes.new(
- workflow_type: Temporal::Api::Common::V1::WorkflowType.new(name: object.workflow_type),
- task_queue: Temporal::Api::TaskQueue::V1::TaskQueue.new(name: object.task_queue),
- input: to_payloads(object.input),
- workflow_run_timeout: object.timeouts[:execution],
+ Temporalio::Api::Command::V1::ContinueAsNewWorkflowExecutionCommandAttributes.new(
+ workflow_type: Temporalio::Api::Common::V1::WorkflowType.new(name: object.workflow_type),
+ task_queue: Temporalio::Api::TaskQueue::V1::TaskQueue.new(name: object.task_queue),
+ input: converter.to_payloads(object.input),
+ workflow_run_timeout: object.timeouts[:run],
workflow_task_timeout: object.timeouts[:task],
- retry_policy: Temporal::Connection::Serializer::RetryPolicy.new(object.retry_policy).to_proto,
- header: serialize_headers(object.headers)
+ retry_policy: Temporal::Connection::Serializer::RetryPolicy.new(object.retry_policy, converter).to_proto,
+ header: serialize_headers(object.headers),
+ memo: serialize_memo(object.memo),
+ search_attributes: serialize_search_attributes(object.search_attributes),
)
)
end
@@ -29,7 +28,19 @@ def to_proto
def serialize_headers(headers)
return unless headers
- Temporal::Api::Common::V1::Header.new(fields: object.headers)
+ Temporalio::Api::Common::V1::Header.new(fields: converter.to_payload_map(headers))
+ end
+
+ def serialize_memo(memo)
+ return unless memo
+
+ Temporalio::Api::Common::V1::Memo.new(fields: converter.to_payload_map(memo))
+ end
+
+ def serialize_search_attributes(search_attributes)
+ return unless search_attributes
+
+ Temporalio::Api::Common::V1::SearchAttributes.new(indexed_fields: converter.to_payload_map_without_codec(search_attributes))
end
end
end
diff --git a/lib/temporal/connection/serializer/fail_workflow.rb b/lib/temporal/connection/serializer/fail_workflow.rb
index 0cc79725..2bedb688 100644
--- a/lib/temporal/connection/serializer/fail_workflow.rb
+++ b/lib/temporal/connection/serializer/fail_workflow.rb
@@ -6,11 +6,11 @@ module Connection
module Serializer
class FailWorkflow < Base
def to_proto
- Temporal::Api::Command::V1::Command.new(
- command_type: Temporal::Api::Enums::V1::CommandType::COMMAND_TYPE_FAIL_WORKFLOW_EXECUTION,
+ Temporalio::Api::Command::V1::Command.new(
+ command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_FAIL_WORKFLOW_EXECUTION,
fail_workflow_execution_command_attributes:
- Temporal::Api::Command::V1::FailWorkflowExecutionCommandAttributes.new(
- failure: Failure.new(object.exception).to_proto
+ Temporalio::Api::Command::V1::FailWorkflowExecutionCommandAttributes.new(
+ failure: Failure.new(object.exception, converter).to_proto
)
)
end
diff --git a/lib/temporal/connection/serializer/failure.rb b/lib/temporal/connection/serializer/failure.rb
index 15dfc555..2d17e949 100644
--- a/lib/temporal/connection/serializer/failure.rb
+++ b/lib/temporal/connection/serializer/failure.rb
@@ -1,19 +1,38 @@
require 'temporal/connection/serializer/base'
-require 'temporal/concerns/payloads'
module Temporal
module Connection
module Serializer
class Failure < Base
- include Concerns::Payloads
+ def initialize(error, converter, serialize_whole_error: false, max_bytes: 200_000)
+ @serialize_whole_error = serialize_whole_error
+ @max_bytes = max_bytes
+ super(error, converter)
+ end
def to_proto
- Temporal::Api::Failure::V1::Failure.new(
+ if @serialize_whole_error
+ details = converter.to_details_payloads(object)
+ if details.payloads.first.data.size > @max_bytes
+ Temporal.logger.error(
+ "Could not serialize exception because it's too large, so we are using a fallback that may not "\
+ "deserialize correctly on the client. First #{@max_bytes} bytes:\n" \
+ "#{details.payloads.first.data[0..@max_bytes - 1]}",
+ {unserializable_error: object.class.name}
+ )
+ # Fallback to a more conservative serialization if the payload is too big to avoid
+ # sending a huge amount of data to temporal and putting it in the history.
+ details = converter.to_details_payloads(object.message)
+ end
+ else
+ details = converter.to_details_payloads(object.message)
+ end
+ Temporalio::Api::Failure::V1::Failure.new(
message: object.message,
stack_trace: stack_trace_from(object.backtrace),
- application_failure_info: Temporal::Api::Failure::V1::ApplicationFailureInfo.new(
+ application_failure_info: Temporalio::Api::Failure::V1::ApplicationFailureInfo.new(
type: object.class.name,
- details: to_details_payloads(object.message)
+ details: details
)
)
end
diff --git a/lib/temporal/connection/serializer/query_answer.rb b/lib/temporal/connection/serializer/query_answer.rb
new file mode 100644
index 00000000..0c98b010
--- /dev/null
+++ b/lib/temporal/connection/serializer/query_answer.rb
@@ -0,0 +1,16 @@
+require 'temporal/connection/serializer/base'
+
+module Temporal
+ module Connection
+ module Serializer
+ class QueryAnswer < Base
+ def to_proto
+ Temporalio::Api::Query::V1::WorkflowQueryResult.new(
+ result_type: Temporalio::Api::Enums::V1::QueryResultType::QUERY_RESULT_TYPE_ANSWERED,
+ answer: converter.to_query_payloads(object.result)
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/connection/serializer/query_failure.rb b/lib/temporal/connection/serializer/query_failure.rb
new file mode 100644
index 00000000..28256ed5
--- /dev/null
+++ b/lib/temporal/connection/serializer/query_failure.rb
@@ -0,0 +1,16 @@
+require 'temporal/connection/serializer/base'
+
+module Temporal
+ module Connection
+ module Serializer
+ class QueryFailure < Base
+ def to_proto
+ Temporalio::Api::Query::V1::WorkflowQueryResult.new(
+ result_type: Temporalio::Api::Enums::V1::QueryResultType::QUERY_RESULT_TYPE_FAILED,
+ error_message: object.error.message
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/connection/serializer/record_marker.rb b/lib/temporal/connection/serializer/record_marker.rb
index 133d79dc..99fddb8c 100644
--- a/lib/temporal/connection/serializer/record_marker.rb
+++ b/lib/temporal/connection/serializer/record_marker.rb
@@ -1,20 +1,17 @@
require 'temporal/connection/serializer/base'
-require 'temporal/concerns/payloads'
module Temporal
module Connection
module Serializer
class RecordMarker < Base
- include Concerns::Payloads
-
def to_proto
- Temporal::Api::Command::V1::Command.new(
- command_type: Temporal::Api::Enums::V1::CommandType::COMMAND_TYPE_RECORD_MARKER,
+ Temporalio::Api::Command::V1::Command.new(
+ command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_RECORD_MARKER,
record_marker_command_attributes:
- Temporal::Api::Command::V1::RecordMarkerCommandAttributes.new(
+ Temporalio::Api::Command::V1::RecordMarkerCommandAttributes.new(
marker_name: object.name,
details: {
- 'data' => to_details_payloads(object.details)
+ 'data' => converter.to_details_payloads(object.details)
}
)
)
diff --git a/lib/temporal/connection/serializer/request_activity_cancellation.rb b/lib/temporal/connection/serializer/request_activity_cancellation.rb
index 2cf51a65..fb341270 100644
--- a/lib/temporal/connection/serializer/request_activity_cancellation.rb
+++ b/lib/temporal/connection/serializer/request_activity_cancellation.rb
@@ -5,10 +5,10 @@ module Connection
module Serializer
class RequestActivityCancellation < Base
def to_proto
- Temporal::Api::Command::V1::Command.new(
- command_type: Temporal::Api::Enums::V1::CommandType::COMMAND_TYPE_REQUEST_CANCEL_ACTIVITY_TASK,
+ Temporalio::Api::Command::V1::Command.new(
+ command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_REQUEST_CANCEL_ACTIVITY_TASK,
request_cancel_activity_task_command_attributes:
- Temporal::Api::Command::V1::RequestCancelActivityTaskCommandAttributes.new(
+ Temporalio::Api::Command::V1::RequestCancelActivityTaskCommandAttributes.new(
scheduled_event_id: object.activity_id.to_i
)
)
diff --git a/lib/temporal/connection/serializer/retry_policy.rb b/lib/temporal/connection/serializer/retry_policy.rb
index 58d42ab1..bea53786 100644
--- a/lib/temporal/connection/serializer/retry_policy.rb
+++ b/lib/temporal/connection/serializer/retry_policy.rb
@@ -16,7 +16,7 @@ def to_proto
non_retryable_error_types: non_retriable_errors,
}.compact
- Temporal::Api::Common::V1::RetryPolicy.new(options)
+ Temporalio::Api::Common::V1::RetryPolicy.new(options)
end
end
end
diff --git a/lib/temporal/connection/serializer/schedule.rb b/lib/temporal/connection/serializer/schedule.rb
new file mode 100644
index 00000000..3e2fc264
--- /dev/null
+++ b/lib/temporal/connection/serializer/schedule.rb
@@ -0,0 +1,22 @@
+require "temporal/connection/serializer/base"
+require "temporal/connection/serializer/schedule_spec"
+require "temporal/connection/serializer/schedule_action"
+require "temporal/connection/serializer/schedule_policies"
+require "temporal/connection/serializer/schedule_state"
+
+module Temporal
+ module Connection
+ module Serializer
+ class Schedule < Base
+ def to_proto
+ Temporalio::Api::Schedule::V1::Schedule.new(
+ spec: Temporal::Connection::Serializer::ScheduleSpec.new(object.spec, converter).to_proto,
+ action: Temporal::Connection::Serializer::ScheduleAction.new(object.action, converter).to_proto,
+ policies: Temporal::Connection::Serializer::SchedulePolicies.new(object.policies, converter).to_proto,
+ state: Temporal::Connection::Serializer::ScheduleState.new(object.state, converter).to_proto
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/connection/serializer/schedule_action.rb b/lib/temporal/connection/serializer/schedule_action.rb
new file mode 100644
index 00000000..b79942be
--- /dev/null
+++ b/lib/temporal/connection/serializer/schedule_action.rb
@@ -0,0 +1,40 @@
+require "temporal/connection/serializer/base"
+
+module Temporal
+ module Connection
+ module Serializer
+ class ScheduleAction < Base
+ def to_proto
+ unless object.is_a?(Temporal::Schedule::StartWorkflowAction)
+ raise ArgumentError, "Unknown action type #{object.class}"
+ end
+
+ Temporalio::Api::Schedule::V1::ScheduleAction.new(
+ start_workflow: Temporalio::Api::Workflow::V1::NewWorkflowExecutionInfo.new(
+ workflow_id: object.workflow_id,
+ workflow_type: Temporalio::Api::Common::V1::WorkflowType.new(
+ name: object.name
+ ),
+ task_queue: Temporalio::Api::TaskQueue::V1::TaskQueue.new(
+ name: object.task_queue
+ ),
+ input: converter.to_payloads(object.input),
+ workflow_execution_timeout: object.execution_timeout,
+ workflow_run_timeout: object.run_timeout,
+ workflow_task_timeout: object.task_timeout,
+ header: Temporalio::Api::Common::V1::Header.new(
+ fields: converter.to_payload_map(object.headers || {})
+ ),
+ memo: Temporalio::Api::Common::V1::Memo.new(
+ fields: converter.to_payload_map(object.memo || {})
+ ),
+ search_attributes: Temporalio::Api::Common::V1::SearchAttributes.new(
+ indexed_fields: converter.to_payload_map_without_codec(object.search_attributes || {})
+ )
+ )
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/connection/serializer/schedule_activity.rb b/lib/temporal/connection/serializer/schedule_activity.rb
index 93d3a207..b3640639 100644
--- a/lib/temporal/connection/serializer/schedule_activity.rb
+++ b/lib/temporal/connection/serializer/schedule_activity.rb
@@ -1,28 +1,24 @@
require 'temporal/connection/serializer/base'
require 'temporal/connection/serializer/retry_policy'
-require 'temporal/concerns/payloads'
module Temporal
module Connection
module Serializer
class ScheduleActivity < Base
- include Concerns::Payloads
-
def to_proto
- Temporal::Api::Command::V1::Command.new(
- command_type: Temporal::Api::Enums::V1::CommandType::COMMAND_TYPE_SCHEDULE_ACTIVITY_TASK,
+ Temporalio::Api::Command::V1::Command.new(
+ command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_SCHEDULE_ACTIVITY_TASK,
schedule_activity_task_command_attributes:
- Temporal::Api::Command::V1::ScheduleActivityTaskCommandAttributes.new(
+ Temporalio::Api::Command::V1::ScheduleActivityTaskCommandAttributes.new(
activity_id: object.activity_id.to_s,
- activity_type: Temporal::Api::Common::V1::ActivityType.new(name: object.activity_type),
- input: to_payloads(object.input),
- namespace: object.namespace,
- task_queue: Temporal::Api::TaskQueue::V1::TaskQueue.new(name: object.task_queue),
+ activity_type: Temporalio::Api::Common::V1::ActivityType.new(name: object.activity_type),
+ input: converter.to_payloads(object.input),
+ task_queue: Temporalio::Api::TaskQueue::V1::TaskQueue.new(name: object.task_queue),
schedule_to_close_timeout: object.timeouts[:schedule_to_close],
schedule_to_start_timeout: object.timeouts[:schedule_to_start],
start_to_close_timeout: object.timeouts[:start_to_close],
heartbeat_timeout: object.timeouts[:heartbeat],
- retry_policy: Temporal::Connection::Serializer::RetryPolicy.new(object.retry_policy).to_proto,
+ retry_policy: Temporal::Connection::Serializer::RetryPolicy.new(object.retry_policy, converter).to_proto,
header: serialize_headers(object.headers)
)
)
@@ -33,7 +29,7 @@ def to_proto
def serialize_headers(headers)
return unless headers
- Temporal::Api::Common::V1::Header.new(fields: object.headers)
+ Temporalio::Api::Common::V1::Header.new(fields: converter.to_payload_map(headers))
end
end
end
diff --git a/lib/temporal/connection/serializer/schedule_overlap_policy.rb b/lib/temporal/connection/serializer/schedule_overlap_policy.rb
new file mode 100644
index 00000000..a866c8ee
--- /dev/null
+++ b/lib/temporal/connection/serializer/schedule_overlap_policy.rb
@@ -0,0 +1,26 @@
+require "temporal/connection/serializer/base"
+
+module Temporal
+ module Connection
+ module Serializer
+ class ScheduleOverlapPolicy < Base
+ SCHEDULE_OVERLAP_POLICY = {
+ skip: Temporalio::Api::Enums::V1::ScheduleOverlapPolicy::SCHEDULE_OVERLAP_POLICY_SKIP,
+ buffer_one: Temporalio::Api::Enums::V1::ScheduleOverlapPolicy::SCHEDULE_OVERLAP_POLICY_BUFFER_ONE,
+ buffer_all: Temporalio::Api::Enums::V1::ScheduleOverlapPolicy::SCHEDULE_OVERLAP_POLICY_BUFFER_ALL,
+ cancel_other: Temporalio::Api::Enums::V1::ScheduleOverlapPolicy::SCHEDULE_OVERLAP_POLICY_CANCEL_OTHER,
+ terminate_other: Temporalio::Api::Enums::V1::ScheduleOverlapPolicy::SCHEDULE_OVERLAP_POLICY_TERMINATE_OTHER,
+ allow_all: Temporalio::Api::Enums::V1::ScheduleOverlapPolicy::SCHEDULE_OVERLAP_POLICY_ALLOW_ALL
+ }.freeze
+
+ def to_proto
+ return unless object
+
+ SCHEDULE_OVERLAP_POLICY.fetch(object) do
+ raise ArgumentError, "Unknown schedule overlap policy specified: #{object}"
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/connection/serializer/schedule_policies.rb b/lib/temporal/connection/serializer/schedule_policies.rb
new file mode 100644
index 00000000..42558899
--- /dev/null
+++ b/lib/temporal/connection/serializer/schedule_policies.rb
@@ -0,0 +1,20 @@
+require "temporal/connection/serializer/base"
+require "temporal/connection/serializer/schedule_overlap_policy"
+
+module Temporal
+ module Connection
+ module Serializer
+ class SchedulePolicies < Base
+ def to_proto
+ return unless object
+
+ Temporalio::Api::Schedule::V1::SchedulePolicies.new(
+ overlap_policy: Temporal::Connection::Serializer::ScheduleOverlapPolicy.new(object.overlap_policy, converter).to_proto,
+ catchup_window: object.catchup_window,
+ pause_on_failure: object.pause_on_failure
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/connection/serializer/schedule_spec.rb b/lib/temporal/connection/serializer/schedule_spec.rb
new file mode 100644
index 00000000..7fb07b48
--- /dev/null
+++ b/lib/temporal/connection/serializer/schedule_spec.rb
@@ -0,0 +1,45 @@
+require "temporal/connection/serializer/base"
+
+module Temporal
+ module Connection
+ module Serializer
+ class ScheduleSpec < Base
+ def to_proto
+ return unless object
+
+ Temporalio::Api::Schedule::V1::ScheduleSpec.new(
+ cron_string: object.cron_expressions,
+ interval: object.intervals.map do |interval|
+ Temporalio::Api::Schedule::V1::IntervalSpec.new(
+ interval: interval.every,
+ phase: interval.offset
+ )
+ end,
+ calendar: object.calendars.map do |calendar|
+ Temporalio::Api::Schedule::V1::CalendarSpec.new(
+ second: calendar.second,
+ minute: calendar.minute,
+ hour: calendar.hour,
+ day_of_month: calendar.day_of_month,
+ month: calendar.month,
+ year: calendar.year,
+ day_of_week: calendar.day_of_week,
+ comment: calendar.comment
+ )
+ end,
+ jitter: object.jitter,
+ timezone_name: object.timezone_name,
+ start_time: serialize_time(object.start_time),
+ end_time: serialize_time(object.end_time)
+ )
+ end
+
+ def serialize_time(input_time)
+ return unless input_time
+
+ Google::Protobuf::Timestamp.new.from_time(input_time)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/connection/serializer/schedule_state.rb b/lib/temporal/connection/serializer/schedule_state.rb
new file mode 100644
index 00000000..9e243de5
--- /dev/null
+++ b/lib/temporal/connection/serializer/schedule_state.rb
@@ -0,0 +1,20 @@
+require "temporal/connection/serializer/base"
+
+module Temporal
+ module Connection
+ module Serializer
+ class ScheduleState < Base
+ def to_proto
+ return unless object
+
+ Temporalio::Api::Schedule::V1::ScheduleState.new(
+ notes: object.notes,
+ paused: object.paused,
+ limited_actions: object.limited_actions,
+ remaining_actions: object.remaining_actions
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/connection/serializer/signal_external_workflow.rb b/lib/temporal/connection/serializer/signal_external_workflow.rb
new file mode 100644
index 00000000..ff229ddb
--- /dev/null
+++ b/lib/temporal/connection/serializer/signal_external_workflow.rb
@@ -0,0 +1,30 @@
+require 'temporal/connection/serializer/base'
+
+module Temporal
+ module Connection
+ module Serializer
+ class SignalExternalWorkflow < Base
+ def to_proto
+ Temporalio::Api::Command::V1::Command.new(
+ command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_SIGNAL_EXTERNAL_WORKFLOW_EXECUTION,
+ signal_external_workflow_execution_command_attributes:
+ Temporalio::Api::Command::V1::SignalExternalWorkflowExecutionCommandAttributes.new(
+ namespace: object.namespace,
+ execution: serialize_execution(object.execution),
+ signal_name: object.signal_name,
+ input: converter.to_signal_payloads(object.input),
+ control: "", # deprecated
+ child_workflow_only: object.child_workflow_only
+ )
+ )
+ end
+
+ private
+
+ def serialize_execution(execution)
+ Temporalio::Api::Common::V1::WorkflowExecution.new(workflow_id: execution[:workflow_id], run_id: execution[:run_id])
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/connection/serializer/start_child_workflow.rb b/lib/temporal/connection/serializer/start_child_workflow.rb
index 55312e50..dcb2fbf0 100644
--- a/lib/temporal/connection/serializer/start_child_workflow.rb
+++ b/lib/temporal/connection/serializer/start_child_workflow.rb
@@ -1,28 +1,37 @@
require 'temporal/connection/serializer/base'
require 'temporal/connection/serializer/retry_policy'
-require 'temporal/concerns/payloads'
+require 'temporal/connection/serializer/workflow_id_reuse_policy'
module Temporal
module Connection
module Serializer
class StartChildWorkflow < Base
- include Concerns::Payloads
+ PARENT_CLOSE_POLICY = {
+ terminate: Temporalio::Api::Enums::V1::ParentClosePolicy::PARENT_CLOSE_POLICY_TERMINATE,
+ abandon: Temporalio::Api::Enums::V1::ParentClosePolicy::PARENT_CLOSE_POLICY_ABANDON,
+ request_cancel: Temporalio::Api::Enums::V1::ParentClosePolicy::PARENT_CLOSE_POLICY_REQUEST_CANCEL,
+ }.freeze
def to_proto
- Temporal::Api::Command::V1::Command.new(
- command_type: Temporal::Api::Enums::V1::CommandType::COMMAND_TYPE_START_CHILD_WORKFLOW_EXECUTION,
+ Temporalio::Api::Command::V1::Command.new(
+ command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_START_CHILD_WORKFLOW_EXECUTION,
start_child_workflow_execution_command_attributes:
- Temporal::Api::Command::V1::StartChildWorkflowExecutionCommandAttributes.new(
+ Temporalio::Api::Command::V1::StartChildWorkflowExecutionCommandAttributes.new(
namespace: object.namespace,
workflow_id: object.workflow_id.to_s,
- workflow_type: Temporal::Api::Common::V1::WorkflowType.new(name: object.workflow_type),
- task_queue: Temporal::Api::TaskQueue::V1::TaskQueue.new(name: object.task_queue),
- input: to_payloads(object.input),
+ workflow_type: Temporalio::Api::Common::V1::WorkflowType.new(name: object.workflow_type),
+ task_queue: Temporalio::Api::TaskQueue::V1::TaskQueue.new(name: object.task_queue),
+ input: converter.to_payloads(object.input),
workflow_execution_timeout: object.timeouts[:execution],
workflow_run_timeout: object.timeouts[:run],
workflow_task_timeout: object.timeouts[:task],
- retry_policy: Temporal::Connection::Serializer::RetryPolicy.new(object.retry_policy).to_proto,
- header: serialize_headers(object.headers)
+ retry_policy: Temporal::Connection::Serializer::RetryPolicy.new(object.retry_policy, converter).to_proto,
+ parent_close_policy: serialize_parent_close_policy(object.parent_close_policy),
+ header: serialize_headers(object.headers),
+ cron_schedule: object.cron_schedule,
+ memo: serialize_memo(object.memo),
+ workflow_id_reuse_policy: Temporal::Connection::Serializer::WorkflowIdReusePolicy.new(object.workflow_id_reuse_policy, converter).to_proto,
+ search_attributes: serialize_search_attributes(object.search_attributes),
)
)
end
@@ -32,7 +41,29 @@ def to_proto
def serialize_headers(headers)
return unless headers
- Temporal::Api::Common::V1::Header.new(fields: object.headers)
+ Temporalio::Api::Common::V1::Header.new(fields: converter.to_payload_map(headers))
+ end
+
+ def serialize_memo(memo)
+ return unless memo
+
+ Temporalio::Api::Common::V1::Memo.new(fields: converter.to_payload_map(memo))
+ end
+
+ def serialize_parent_close_policy(parent_close_policy)
+ return unless parent_close_policy
+
+ unless PARENT_CLOSE_POLICY.key? parent_close_policy
+ raise ArgumentError, "Unknown parent_close_policy '#{parent_close_policy}' specified"
+ end
+
+ PARENT_CLOSE_POLICY[parent_close_policy]
+ end
+
+ def serialize_search_attributes(search_attributes)
+ return unless search_attributes
+
+ Temporalio::Api::Common::V1::SearchAttributes.new(indexed_fields: converter.to_payload_map_without_codec(search_attributes))
end
end
end
diff --git a/lib/temporal/connection/serializer/start_timer.rb b/lib/temporal/connection/serializer/start_timer.rb
index 9ec313ea..6869dcb1 100644
--- a/lib/temporal/connection/serializer/start_timer.rb
+++ b/lib/temporal/connection/serializer/start_timer.rb
@@ -5,10 +5,10 @@ module Connection
module Serializer
class StartTimer < Base
def to_proto
- Temporal::Api::Command::V1::Command.new(
- command_type: Temporal::Api::Enums::V1::CommandType::COMMAND_TYPE_START_TIMER,
+ Temporalio::Api::Command::V1::Command.new(
+ command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_START_TIMER,
start_timer_command_attributes:
- Temporal::Api::Command::V1::StartTimerCommandAttributes.new(
+ Temporalio::Api::Command::V1::StartTimerCommandAttributes.new(
timer_id: object.timer_id.to_s,
start_to_fire_timeout: object.timeout
)
diff --git a/lib/temporal/connection/serializer/upsert_search_attributes.rb b/lib/temporal/connection/serializer/upsert_search_attributes.rb
new file mode 100644
index 00000000..b1b0395a
--- /dev/null
+++ b/lib/temporal/connection/serializer/upsert_search_attributes.rb
@@ -0,0 +1,21 @@
+require 'temporal/connection/serializer/base'
+
+module Temporal
+ module Connection
+ module Serializer
+ class UpsertSearchAttributes < Base
+ def to_proto
+ Temporalio::Api::Command::V1::Command.new(
+ command_type: Temporalio::Api::Enums::V1::CommandType::COMMAND_TYPE_UPSERT_WORKFLOW_SEARCH_ATTRIBUTES,
+ upsert_workflow_search_attributes_command_attributes:
+ Temporalio::Api::Command::V1::UpsertWorkflowSearchAttributesCommandAttributes.new(
+ search_attributes: Temporalio::Api::Common::V1::SearchAttributes.new(
+ indexed_fields: converter.to_payload_map_without_codec(object.search_attributes || {})
+ ),
+ )
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/connection/serializer/workflow_id_reuse_policy.rb b/lib/temporal/connection/serializer/workflow_id_reuse_policy.rb
new file mode 100644
index 00000000..0c6c71bf
--- /dev/null
+++ b/lib/temporal/connection/serializer/workflow_id_reuse_policy.rb
@@ -0,0 +1,26 @@
+require 'temporal/connection'
+
+module Temporal
+ module Connection
+ module Serializer
+ class WorkflowIdReusePolicy < Base
+
+ WORKFLOW_ID_REUSE_POLICY = {
+ allow_failed: Temporalio::Api::Enums::V1::WorkflowIdReusePolicy::WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE_FAILED_ONLY,
+ allow: Temporalio::Api::Enums::V1::WorkflowIdReusePolicy::WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE,
+ reject: Temporalio::Api::Enums::V1::WorkflowIdReusePolicy::WORKFLOW_ID_REUSE_POLICY_REJECT_DUPLICATE,
+ terminate_if_running: Temporalio::Api::Enums::V1::WorkflowIdReusePolicy::WORKFLOW_ID_REUSE_POLICY_TERMINATE_IF_RUNNING
+ }.freeze
+
+ def to_proto
+ return unless object
+
+ policy = WORKFLOW_ID_REUSE_POLICY[object]
+ raise ArgumentError, "Unknown workflow_id_reuse_policy specified: #{object}" unless policy
+
+ policy
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/converter_wrapper.rb b/lib/temporal/converter_wrapper.rb
new file mode 100644
index 00000000..a14e2abf
--- /dev/null
+++ b/lib/temporal/converter_wrapper.rb
@@ -0,0 +1,87 @@
+# This class provides convenience methods for accessing the converter/codec. It is fully backwards
+# compatible with Temporal::Connection::Converter::Base interface, however it adds new convenience
+# methods specific to different conversion scenarios.
+
+module Temporal
+ class ConverterWrapper
+ def initialize(converter, codec)
+ @converter = converter
+ @codec = codec
+ end
+
+ def from_payloads(payloads)
+ payloads = codec.decodes(payloads)
+ converter.from_payloads(payloads)
+ end
+
+ def from_payload(payload)
+ payload = codec.decode(payload)
+ converter.from_payload(payload)
+ end
+
+ def from_payload_map_without_codec(payload_map)
+ payload_map.map { |key, value| [key, converter.from_payload(value)] }.to_h
+ end
+
+ def from_result_payloads(payloads)
+ from_payloads(payloads)&.first
+ end
+
+ def from_details_payloads(payloads)
+ from_payloads(payloads)&.first
+ end
+
+ def from_signal_payloads(payloads)
+ from_payloads(payloads)&.first
+ end
+
+ def from_query_payloads(payloads)
+ from_payloads(payloads)&.first
+ end
+
+ def from_payload_map(payload_map)
+ payload_map.map { |key, value| [key, from_payload(value)] }.to_h
+ end
+
+ def to_payloads(data)
+ payloads = converter.to_payloads(data)
+ codec.encodes(payloads)
+ end
+
+ def to_payload(data)
+ payload = converter.to_payload(data)
+ codec.encode(payload)
+ end
+
+ def to_payload_map_without_codec(data)
+ # skips the codec step because search attributes don't use this pipeline
+ data.transform_values do |value|
+ converter.to_payload(value)
+ end
+ end
+
+ def to_result_payloads(data)
+ to_payloads([data])
+ end
+
+ def to_details_payloads(data)
+ to_payloads([data])
+ end
+
+ def to_signal_payloads(data)
+ to_payloads([data])
+ end
+
+ def to_query_payloads(data)
+ to_payloads([data])
+ end
+
+ def to_payload_map(data)
+ data.transform_values(&method(:to_payload))
+ end
+
+ private
+
+ attr_reader :converter, :codec
+ end
+end
diff --git a/lib/temporal/error_handler.rb b/lib/temporal/error_handler.rb
index 9702edcb..98fca99f 100644
--- a/lib/temporal/error_handler.rb
+++ b/lib/temporal/error_handler.rb
@@ -1,7 +1,7 @@
module Temporal
module ErrorHandler
- def self.handle(error, metadata: nil)
- Temporal.configuration.error_handlers.each do |handler|
+ def self.handle(error, configuration, metadata: nil)
+ configuration.error_handlers.each do |handler|
handler.call(error, metadata: metadata)
rescue StandardError => e
Temporal.logger.error("Error handler failed", { error: e.inspect })
diff --git a/lib/temporal/errors.rb b/lib/temporal/errors.rb
index 5730224c..1c423a6c 100644
--- a/lib/temporal/errors.rb
+++ b/lib/temporal/errors.rb
@@ -9,18 +9,34 @@ class InternalError < Error; end
# a non-deterministic workflow implementation or the gem's bug
class NonDeterministicWorkflowError < InternalError; end
+ # Indicates a workflow task was encountered that used an unknown SDK flag
+ class UnknownSDKFlagError < InternalError; end
+
# Superclass for misconfiguration/misuse on the client (user) side
class ClientError < Error; end
# Represents any timeout
class TimeoutError < ClientError; end
+ # Represents when a child workflow times out
+ class ChildWorkflowTimeoutError < Error; end
+
+ # Represents when a child workflow is terminated
+ class ChildWorkflowTerminatedError < Error; end
+
# A superclass for activity exceptions raised explicitly
# with the intent to propagate to a workflow
+ # With v2 serialization (set with Temporal::Configuration#use_error_serialization_v2=true) you can
+ # throw any exception from an activity and expect that it can be handled by the workflow.
class ActivityException < ClientError; end
+ # Represents cancellation of a non-started activity
+ class ActivityCanceled < ActivityException; end
+
class ActivityNotRegistered < ClientError; end
class WorkflowNotRegistered < ClientError; end
+ class SecondDynamicActivityError < ClientError; end
+ class SecondDynamicWorkflowError < ClientError; end
class ApiError < Error; end
@@ -36,27 +52,40 @@ class WorkflowCanceled < WorkflowError; end
# Errors where the workflow run didn't complete but not an error for the whole workflow.
class WorkflowRunError < Error; end
+
class WorkflowRunContinuedAsNew < WorkflowRunError
attr_reader :new_run_id
+
def initialize(new_run_id:)
super
@new_run_id = new_run_id
end
end
+ # Once the workflow succeeds, fails, or continues as new, you can't issue any other commands such as
+ # scheduling an activity. This error is thrown if you try, before we report completion back to the server.
+ # This could happen due to activity futures that aren't awaited before the workflow closes,
+ # calling workflow.continue_as_new, workflow.complete, or workflow.fail in the middle of your workflow code,
+ # or an internal framework bug.
+ class WorkflowAlreadyCompletingError < InternalError; end
+
class WorkflowExecutionAlreadyStartedFailure < ApiError
attr_reader :run_id
- def initialize(message, run_id)
+ def initialize(message, run_id = nil)
super(message)
@run_id = run_id
end
end
+
class NamespaceNotActiveFailure < ApiError; end
class ClientVersionNotSupportedFailure < ApiError; end
class FeatureVersionNotSupportedFailure < ApiError; end
class NamespaceAlreadyExistsFailure < ApiError; end
class CancellationAlreadyRequestedFailure < ApiError; end
- class QueryFailedFailure < ApiError; end
- class UnexpectedResponse < ApiError; end
+ class QueryFailed < ApiError; end
+
+ class SearchAttributeAlreadyExistsFailure < ApiError; end
+ class SearchAttributeFailure < ApiError; end
+ class InvalidSearchAttributeTypeFailure < ClientError; end
end
diff --git a/lib/temporal/executable_lookup.rb b/lib/temporal/executable_lookup.rb
index 651951b3..88d85bf4 100644
--- a/lib/temporal/executable_lookup.rb
+++ b/lib/temporal/executable_lookup.rb
@@ -1,3 +1,5 @@
+require 'temporal/errors'
+
# This class is responsible for matching an executable (activity or workflow) name
# to a class implementing it.
#
@@ -6,20 +8,39 @@
#
module Temporal
class ExecutableLookup
+
+ class SecondDynamicExecutableError < StandardError
+ attr_reader :previous_executable_name
+
+ def initialize(previous_executable_name)
+ @previous_executable_name = previous_executable_name
+ end
+ end
+
def initialize
@executables = {}
end
+ # Register an executable to call as a fallback when one of that name isn't registered.
+ def add_dynamic(name, executable)
+ if @fallback_executable_name
+ raise SecondDynamicExecutableError, @fallback_executable_name
+ end
+
+ @fallback_executable = executable
+ @fallback_executable_name = name
+ end
+
def add(name, executable)
executables[name] = executable
end
def find(name)
- executables[name]
+ executables[name] || @fallback_executable
end
private
- attr_reader :executables
+ attr_reader :executables, :fallback_executable, :fallback_executable_name
end
end
diff --git a/lib/temporal/execution_options.rb b/lib/temporal/execution_options.rb
index 6fcaf371..d3319cb8 100644
--- a/lib/temporal/execution_options.rb
+++ b/lib/temporal/execution_options.rb
@@ -3,7 +3,8 @@
module Temporal
class ExecutionOptions
- attr_reader :name, :namespace, :task_queue, :retry_policy, :timeouts, :headers
+ attr_reader :name, :namespace, :task_queue, :retry_policy, :timeouts, :headers, :memo, :search_attributes,
+ :start_delay
def initialize(object, options, defaults = nil)
# Options are treated as overrides and take precedence
@@ -13,6 +14,9 @@ def initialize(object, options, defaults = nil)
@retry_policy = options[:retry_policy] || {}
@timeouts = options[:timeouts] || {}
@headers = options[:headers] || {}
+ @memo = options[:memo] || {}
+ @search_attributes = options[:search_attributes] || {}
+ @start_delay = options[:start_delay] || 0
# For Temporal::Workflow and Temporal::Activity use defined values as the next option
if has_executable_concern?(object)
@@ -29,6 +33,7 @@ def initialize(object, options, defaults = nil)
@task_queue ||= defaults.task_queue
@timeouts = defaults.timeouts.merge(@timeouts)
@headers = defaults.headers.merge(@headers)
+ @search_attributes = defaults.search_attributes.merge(@search_attributes)
end
if @retry_policy.empty?
@@ -48,6 +53,20 @@ def task_list
private
def has_executable_concern?(object)
+ if object.is_a?(String)
+ # NOTE: When object is a String, Object#singleton_class mutates it and
+ # screws up C extension class detection used in older versions of
+ # the protobuf library. This was fixed in protobuf 3.20.0-rc1
+ # via https://github.com/protocolbuffers/protobuf/pull/9342.
+ #
+ # Creating a duplicate of this object prevents the mutation of
+ # the original object which will be put into a protobuf payload
+ # before being sent to Temporal server. Because duplication fails
+ # when Sorbet final classes are used, duplication is limited only
+ # to String classes.
+ object = object.dup
+ end
+
object.singleton_class.included_modules.include?(Concerns::Executable)
rescue TypeError
false
diff --git a/lib/temporal/metadata.rb b/lib/temporal/metadata.rb
index e39f8da9..5439029f 100644
--- a/lib/temporal/metadata.rb
+++ b/lib/temporal/metadata.rb
@@ -2,41 +2,12 @@
require 'temporal/metadata/activity'
require 'temporal/metadata/workflow'
require 'temporal/metadata/workflow_task'
-require 'temporal/concerns/payloads'
module Temporal
module Metadata
- ACTIVITY_TYPE = :activity
- WORKFLOW_TASK_TYPE = :workflow_task
- WORKFLOW_TYPE = :workflow
class << self
- include Concerns::Payloads
-
- def generate(type, data, namespace = nil)
- case type
- when ACTIVITY_TYPE
- activity_metadata_from(data, namespace)
- when WORKFLOW_TASK_TYPE
- workflow_task_metadata_from(data, namespace)
- when WORKFLOW_TYPE
- workflow_metadata_from(data)
- else
- raise InternalError, 'Unsupported metadata type'
- end
- end
-
- private
-
- def headers(fields)
- result = {}
- fields.each do |field, payload|
- result[field] = from_payload(payload)
- end
- result
- end
-
- def activity_metadata_from(task, namespace)
+ def generate_activity_metadata(task, namespace, converter)
Metadata::Activity.new(
namespace: namespace,
id: task.activity_id,
@@ -46,12 +17,17 @@ def activity_metadata_from(task, namespace)
workflow_run_id: task.workflow_execution.run_id,
workflow_id: task.workflow_execution.workflow_id,
workflow_name: task.workflow_type.name,
- headers: headers(task.header&.fields),
- heartbeat_details: from_details_payloads(task.heartbeat_details)
+ headers: converter.from_payload_map(task.header&.fields || {}),
+ heartbeat_details: converter.from_details_payloads(task.heartbeat_details),
+ scheduled_at: task.scheduled_time.to_time,
+ current_attempt_scheduled_at: task.current_attempt_scheduled_time.to_time,
+ heartbeat_timeout: task.heartbeat_timeout.seconds
)
end
- def workflow_task_metadata_from(task, namespace)
+ # @param task [Temporalio::Api::WorkflowService::V1::PollWorkflowTaskQueueResponse]
+ # @param namespace [String]
+ def generate_workflow_task_metadata(task, namespace)
Metadata::WorkflowTask.new(
namespace: namespace,
id: task.started_event_id,
@@ -63,12 +39,21 @@ def workflow_task_metadata_from(task, namespace)
)
end
- def workflow_metadata_from(event)
+ # @param event [Temporal::Workflow::History::Event] Workflow started history event
+ # @param task_metadata [Temporal::Metadata::WorkflowTask] workflow task metadata
+ def generate_workflow_metadata(event, task_metadata, converter)
Metadata::Workflow.new(
- name: event.workflow_type.name,
- run_id: event.original_execution_run_id,
- attempt: event.attempt,
- headers: headers(event.header&.fields)
+ name: event.attributes.workflow_type.name,
+ id: task_metadata.workflow_id,
+ run_id: event.attributes.original_execution_run_id,
+ parent_id: event.attributes.parent_workflow_execution&.workflow_id,
+ parent_run_id: event.attributes.parent_workflow_execution&.run_id,
+ attempt: event.attributes.attempt,
+ namespace: task_metadata.namespace,
+ task_queue: event.attributes.task_queue.name,
+ headers: converter.from_payload_map(event.attributes.header&.fields || {}),
+ run_started_at: event.timestamp,
+ memo: converter.from_payload_map(event.attributes.memo&.fields || {}),
)
end
end
diff --git a/lib/temporal/metadata/activity.rb b/lib/temporal/metadata/activity.rb
index c7e7d814..d7afea87 100644
--- a/lib/temporal/metadata/activity.rb
+++ b/lib/temporal/metadata/activity.rb
@@ -3,9 +3,9 @@
module Temporal
module Metadata
class Activity < Base
- attr_reader :namespace, :id, :name, :task_token, :attempt, :workflow_run_id, :workflow_id, :workflow_name, :headers, :heartbeat_details
+ attr_reader :namespace, :id, :name, :task_token, :attempt, :workflow_run_id, :workflow_id, :workflow_name, :headers, :heartbeat_details, :scheduled_at, :current_attempt_scheduled_at, :heartbeat_timeout
- def initialize(namespace:, id:, name:, task_token:, attempt:, workflow_run_id:, workflow_id:, workflow_name:, headers: {}, heartbeat_details:)
+ def initialize(namespace:, id:, name:, task_token:, attempt:, workflow_run_id:, workflow_id:, workflow_name:, headers: {}, heartbeat_details:, scheduled_at:, current_attempt_scheduled_at:, heartbeat_timeout:)
@namespace = namespace
@id = id
@name = name
@@ -16,6 +16,9 @@ def initialize(namespace:, id:, name:, task_token:, attempt:, workflow_run_id:,
@workflow_name = workflow_name
@headers = headers
@heartbeat_details = heartbeat_details
+ @scheduled_at = scheduled_at
+ @current_attempt_scheduled_at = current_attempt_scheduled_at
+ @heartbeat_timeout = heartbeat_timeout
freeze
end
@@ -32,7 +35,9 @@ def to_h
'workflow_run_id' => workflow_run_id,
'activity_id' => id,
'activity_name' => name,
- 'attempt' => attempt
+ 'attempt' => attempt,
+ 'scheduled_at' => scheduled_at.to_s,
+ 'current_attempt_scheduled_at' => current_attempt_scheduled_at.to_s
}
end
end
diff --git a/lib/temporal/metadata/workflow.rb b/lib/temporal/metadata/workflow.rb
index 86d14de4..e912391d 100644
--- a/lib/temporal/metadata/workflow.rb
+++ b/lib/temporal/metadata/workflow.rb
@@ -3,13 +3,20 @@
module Temporal
module Metadata
class Workflow < Base
- attr_reader :name, :run_id, :attempt, :headers
+ attr_reader :namespace, :id, :name, :run_id, :parent_id, :parent_run_id, :attempt, :task_queue, :headers, :run_started_at, :memo
- def initialize(name:, run_id:, attempt:, headers: {})
+ def initialize(namespace:, id:, name:, run_id:, parent_id:, parent_run_id:, attempt:, task_queue:, headers:, run_started_at:, memo:)
+ @namespace = namespace
+ @id = id
@name = name
@run_id = run_id
+ @parent_id = parent_id
+ @parent_run_id = parent_run_id
@attempt = attempt
+ @task_queue = task_queue
@headers = headers
+ @run_started_at = run_started_at
+ @memo = memo
freeze
end
@@ -20,9 +27,16 @@ def workflow?
def to_h
{
+ 'namespace' => namespace,
+ 'workflow_id' => id,
'workflow_name' => name,
'workflow_run_id' => run_id,
- 'attempt' => attempt
+ 'parent_workflow_id' => parent_id,
+ 'parent_workflow_run_id' => parent_run_id,
+ 'attempt' => attempt,
+ 'task_queue' => task_queue,
+ 'run_started_at' => run_started_at.to_f,
+ 'memo' => memo,
}
end
end
diff --git a/lib/temporal/metric_keys.rb b/lib/temporal/metric_keys.rb
new file mode 100644
index 00000000..e945f0b6
--- /dev/null
+++ b/lib/temporal/metric_keys.rb
@@ -0,0 +1,16 @@
+module Temporal
+ module MetricKeys
+ ACTIVITY_POLLER_TIME_SINCE_LAST_POLL = 'activity_poller.time_since_last_poll'.freeze
+ ACTIVITY_POLLER_POLL_COMPLETED = 'activity_poller.poll_completed'.freeze
+ ACTIVITY_TASK_QUEUE_TIME = 'activity_task.queue_time'.freeze
+ ACTIVITY_TASK_LATENCY = 'activity_task.latency'.freeze
+
+ WORKFLOW_POLLER_TIME_SINCE_LAST_POLL = 'workflow_poller.time_since_last_poll'.freeze
+ WORKFLOW_POLLER_POLL_COMPLETED = 'workflow_poller.poll_completed'.freeze
+ WORKFLOW_TASK_QUEUE_TIME = 'workflow_task.queue_time'.freeze
+ WORKFLOW_TASK_LATENCY = 'workflow_task.latency'.freeze
+ WORKFLOW_TASK_EXECUTION_FAILED = 'workflow_task.execution_failed'.freeze
+
+ THREAD_POOL_AVAILABLE_THREADS = 'thread_pool.available_threads'.freeze
+ end
+end
diff --git a/lib/temporal/middleware/header_propagator_chain.rb b/lib/temporal/middleware/header_propagator_chain.rb
new file mode 100644
index 00000000..39384438
--- /dev/null
+++ b/lib/temporal/middleware/header_propagator_chain.rb
@@ -0,0 +1,22 @@
+module Temporal
+ module Middleware
+ class HeaderPropagatorChain
+ def initialize(entries = [])
+ @propagators = entries.map(&:init_middleware)
+ end
+
+ def inject(headers)
+ return headers if propagators.empty?
+ h = headers.dup
+ for propagator in propagators
+ propagator.inject!(h)
+ end
+ h
+ end
+
+ private
+
+ attr_reader :propagators
+ end
+ end
+end
\ No newline at end of file
diff --git a/lib/temporal/reset_reapply_type.rb b/lib/temporal/reset_reapply_type.rb
new file mode 100644
index 00000000..29a489d4
--- /dev/null
+++ b/lib/temporal/reset_reapply_type.rb
@@ -0,0 +1,6 @@
+module Temporal
+ module ResetReapplyType
+ SIGNAL = :signal
+ NONE = :none
+ end
+end
diff --git a/lib/temporal/schedule.rb b/lib/temporal/schedule.rb
new file mode 100644
index 00000000..8fa84d2d
--- /dev/null
+++ b/lib/temporal/schedule.rb
@@ -0,0 +1,16 @@
+require "temporal/schedule/backfill"
+require "temporal/schedule/calendar"
+require "temporal/schedule/describe_schedule_response"
+require "temporal/schedule/interval"
+require "temporal/schedule/list_schedules_response"
+require "temporal/schedule/schedule"
+require "temporal/schedule/schedule_list_entry"
+require "temporal/schedule/schedule_policies"
+require "temporal/schedule/schedule_spec"
+require "temporal/schedule/schedule_state"
+require "temporal/schedule/start_workflow_action"
+
+module Temporal
+ module Schedule
+ end
+end
diff --git a/lib/temporal/schedule/backfill.rb b/lib/temporal/schedule/backfill.rb
new file mode 100644
index 00000000..b107d3f6
--- /dev/null
+++ b/lib/temporal/schedule/backfill.rb
@@ -0,0 +1,42 @@
+module Temporal
+ module Schedule
+ class Backfill
+ # Controls what happens when a workflow would be started
+ # by a schedule, and is already running.
+ #
+ # If provided, must be one of:
+ # - :skip (default): means don't start anything. When the workflow
+ # completes, the next scheduled event after that time will be considered.
+ # - :buffer_one: means start the workflow again as soon as the
+ # current one completes, but only buffer one start in this way. If
+ # another start is supposed to happen when the workflow is running,
+ # and one is already buffered, then only the first one will be
+ # started after the running workflow finishes.
+ # - :buffer_all: means buffer up any number of starts to all happen
+ # sequentially, immediately after the running workflow completes.
+ # - :cancel_other: means that if there is another workflow running, cancel
+ # it, and start the new one after the old one completes cancellation.
+ # - :terminate_other: means that if there is another workflow running,
+ # terminate it and start the new one immediately.
+ # - :allow_all: means start any number of concurrent workflows.
+ # Note that with this policy, last completion result and last failure
+ # will not be available since workflows are not sequential.
+ attr_reader :overlap_policy
+
+ # The time to start the backfill
+ attr_reader :start_time
+
+ # The time to end the backfill
+ attr_reader :end_time
+
+ # @param start_time [Time] The time to start the backfill
+ # @param end_time [Time] The time to end the backfill
+ # @param overlap_policy [Symbol] Should be one of :skip, :buffer_one, :buffer_all, :cancel_other, :terminate_other, :allow_all
+ def initialize(start_time: nil, end_time: nil, overlap_policy: nil)
+ @start_time = start_time
+ @end_time = end_time
+ @overlap_policy = overlap_policy
+ end
+ end
+ end
+end
diff --git a/lib/temporal/schedule/calendar.rb b/lib/temporal/schedule/calendar.rb
new file mode 100644
index 00000000..26d24d49
--- /dev/null
+++ b/lib/temporal/schedule/calendar.rb
@@ -0,0 +1,48 @@
+module Temporal
+ module Schedule
+
+ # Calendar describes an event specification relative to the calendar,
+ # similar to a traditional cron specification, but with labeled fields. Each
+ # field can be one of:
+ # *: matches always
+ # x: matches when the field equals x
+ # x/y : matches when the field equals x+n*y where n is an integer
+ # x-z: matches when the field is between x and z inclusive
+ # w,x,y,...: matches when the field is one of the listed values
+ #
+ # Each x, y, z, ... is either a decimal integer, or a month or day of week name
+ # or abbreviation (in the appropriate fields).
+ #
+ # A timestamp matches if all fields match.
+ #
+ # Note that fields have different default values, for convenience.
+ #
+ # Note that the special case that some cron implementations have for treating
+ # day_of_month and day_of_week as "or" instead of "and" when both are set is
+ # not implemented.
+ #
+ # day_of_week can accept 0 or 7 as Sunday
+ class Calendar
+ attr_reader :second, :minute, :hour, :day_of_month, :month, :year, :day_of_week, :comment
+
+ # @param second [String] Expression to match seconds. Default: 0
+ # @param minute [String] Expression to match minutes. Default: 0
+ # @param hour [String] Expression to match hours. Default: 0
+ # @param day_of_month [String] Expression to match days of the month. Default: *
+ # @param month [String] Expression to match months. Default: *
+ # @param year [String] Expression to match years. Default: *
+ # @param day_of_week [String] Expression to match days of the week. Default: *
+ # @param comment [String] Free form comment describing the intent of this calendar.
+ def initialize(second: nil, minute: nil, hour: nil, day_of_month: nil, month: nil, year: nil, day_of_week: nil, comment: nil)
+ @second = second
+ @minute = minute
+ @hour = hour
+ @day_of_month = day_of_month
+ @month = month
+ @day_of_week = day_of_week
+ @year = year
+ @comment = comment
+ end
+ end
+ end
+end
diff --git a/lib/temporal/schedule/describe_schedule_response.rb b/lib/temporal/schedule/describe_schedule_response.rb
new file mode 100644
index 00000000..d0d3c627
--- /dev/null
+++ b/lib/temporal/schedule/describe_schedule_response.rb
@@ -0,0 +1,11 @@
+module Temporal
+ module Schedule
+ class DescribeScheduleResponse < Struct.new(:schedule, :info, :memo, :search_attributes, :conflict_token, keyword_init: true)
+ # Override the constructor to make these objects immutable
+ def initialize(*args)
+ super(*args)
+ self.freeze
+ end
+ end
+ end
+end
diff --git a/lib/temporal/schedule/interval.rb b/lib/temporal/schedule/interval.rb
new file mode 100644
index 00000000..0d3650c9
--- /dev/null
+++ b/lib/temporal/schedule/interval.rb
@@ -0,0 +1,24 @@
+module Temporal
+ module Schedule
+ # Interval matches times that can be expressed as:
+ # Epoch + (n * every) + offset
+ # where n is all integers ≥ 0.
+
+ # For example, an `every` of 1 hour with `offset` of zero would match
+ # every hour, on the hour. The same `every` but an `offset`
+ # of 19 minutes would match every `xx:19:00`. An `every` of 28 days with
+ # `offset` zero would match `2022-02-17T00:00:00Z` (among other times).
+ # The same `every` with `offset` of 3 days, 5 hours, and 23 minutes
+ # would match `2022-02-20T05:23:00Z` instead.
+ class Interval
+ attr_reader :every, :offset
+
+ # @param every [Integer] the number of seconds between each interval
+ # @param offset [Integer] the number of seconds to provide as offset
+ def initialize(every:, offset: nil)
+ @every = every
+ @offset = offset
+ end
+ end
+ end
+end
diff --git a/lib/temporal/schedule/list_schedules_response.rb b/lib/temporal/schedule/list_schedules_response.rb
new file mode 100644
index 00000000..acf90b74
--- /dev/null
+++ b/lib/temporal/schedule/list_schedules_response.rb
@@ -0,0 +1,11 @@
+module Temporal
+ module Schedule
+ class ListSchedulesResponse < Struct.new(:schedules, :next_page_token, keyword_init: true)
+ # Override the constructor to make these objects immutable
+ def initialize(*args)
+ super(*args)
+ self.freeze
+ end
+ end
+ end
+end
diff --git a/lib/temporal/schedule/schedule.rb b/lib/temporal/schedule/schedule.rb
new file mode 100644
index 00000000..91fcf7d1
--- /dev/null
+++ b/lib/temporal/schedule/schedule.rb
@@ -0,0 +1,14 @@
+module Temporal
+ module Schedule
+ class Schedule
+ attr_reader :spec, :action, :policies, :state
+
+ def initialize(spec:, action:, policies: nil, state: nil)
+ @spec = spec
+ @action = action
+ @policies = policies
+ @state = state
+ end
+ end
+ end
+end
diff --git a/lib/temporal/schedule/schedule_list_entry.rb b/lib/temporal/schedule/schedule_list_entry.rb
new file mode 100644
index 00000000..338d966e
--- /dev/null
+++ b/lib/temporal/schedule/schedule_list_entry.rb
@@ -0,0 +1,12 @@
+module Temporal
+ module Schedule
+ # ScheduleListEntry is returned by ListSchedules.
+ class ScheduleListEntry < Struct.new(:schedule_id, :memo, :search_attributes, :info, keyword_init: true)
+ # Override the constructor to make these objects immutable
+ def initialize(*args)
+ super(*args)
+ self.freeze
+ end
+ end
+ end
+end
diff --git a/lib/temporal/schedule/schedule_policies.rb b/lib/temporal/schedule/schedule_policies.rb
new file mode 100644
index 00000000..f8aeea21
--- /dev/null
+++ b/lib/temporal/schedule/schedule_policies.rb
@@ -0,0 +1,48 @@
+module Temporal
+ module Schedule
+ class SchedulePolicies
+ # Controls what happens when a workflow would be started
+ # by a schedule, and is already running.
+ #
+ # If provided, must be one of:
+ # - :skip (default): means don't start anything. When the workflow
+ # completes, the next scheduled event after that time will be considered.
+      # - :buffer_one: means start the workflow again as soon as the
+ # current one completes, but only buffer one start in this way. If
+ # another start is supposed to happen when the workflow is running,
+ # and one is already buffered, then only the first one will be
+ # started after the running workflow finishes.
+      # - :buffer_all: means buffer up any number of starts to all happen
+ # sequentially, immediately after the running workflow completes.
+ # - :cancel_other: means that if there is another workflow running, cancel
+ # it, and start the new one after the old one completes cancellation.
+ # - :terminate_other: means that if there is another workflow running,
+ # terminate it and start the new one immediately.
+ # - :allow_all: means start any number of concurrent workflows.
+ # Note that with this policy, last completion result and last failure
+ # will not be available since workflows are not sequential.
+ attr_reader :overlap_policy
+
+ # Policy for catchups:
+ # If the Temporal server misses an action due to one or more components
+ # being down, and comes back up, the action will be run if the scheduled
+ # time is within this window from the current time.
+ # This value defaults to 60 seconds, and can't be less than 10 seconds.
+ attr_reader :catchup_window
+
+ # If true, and a workflow run fails or times out, turn on "paused".
+ # This applies after retry policies: the full chain of retries must fail to
+ # trigger a pause here.
+ attr_reader :pause_on_failure
+
+ # @param overlap_policy [Symbol] Should be one of :skip, :buffer_one, :buffer_all, :cancel_other, :terminate_other, :allow_all
+ # @param catchup_window [Integer] The number of seconds to catchup if the Temporal server misses an action
+ # @param pause_on_failure [Boolean] Whether to pause the schedule if the action fails
+ def initialize(overlap_policy: nil, catchup_window: nil, pause_on_failure: nil)
+ @overlap_policy = overlap_policy
+ @catchup_window = catchup_window
+ @pause_on_failure = pause_on_failure
+ end
+ end
+ end
+end
diff --git a/lib/temporal/schedule/schedule_spec.rb b/lib/temporal/schedule/schedule_spec.rb
new file mode 100644
index 00000000..1034d298
--- /dev/null
+++ b/lib/temporal/schedule/schedule_spec.rb
@@ -0,0 +1,93 @@
+module Temporal
+ module Schedule
+ # ScheduleSpec is a complete description of a set of absolute timestamps
+ # (possibly infinite) that an action should occur at. The meaning of a
+ # ScheduleSpec depends only on its contents and never changes, except that the
+ # definition of a time zone can change over time (most commonly, when daylight
+ # saving time policy changes for an area). To create a totally self-contained
+  # ScheduleSpec, use UTC or set timezone_name explicitly.
+
+ # For input, you can provide zero or more of: calendars, intervals or
+ # cron_expressions and all of them will be used (the schedule will take
+ # action at the union of all of their times, minus the ones that match
+ # exclude_structured_calendar).
+ class ScheduleSpec
+ # Calendar-based specifications of times.
+ #
+ # @return [Array]
+ attr_reader :calendars
+
+ # Interval-based specifications of times.
+ #
+ # @return [Array]
+ attr_reader :intervals
+
+ # [Cron expressions](https://crontab.guru/). This is provided for easy
+ # migration from legacy Cron Workflows. For new use cases, we recommend
+ # using calendars or intervals for readability and maintainability.
+ #
+ #
+ # The string can have 5, 6, or 7 fields, separated by spaces.
+ #
+ # - 5 fields: minute, hour, day_of_month, month, day_of_week
+ # - 6 fields: minute, hour, day_of_month, month, day_of_week, year
+ # - 7 fields: second, minute, hour, day_of_month, month, day_of_week, year
+ #
+ # Notes:
+ #
+ # - If year is not given, it defaults to *.
+ # - If second is not given, it defaults to 0.
+ # - Shorthands `@yearly`, `@monthly`, `@weekly`, `@daily`, and `@hourly` are also
+ # accepted instead of the 5-7 time fields.
+      # - `@every <interval>[/<phase>]` is accepted and gets compiled into an
+      #   IntervalSpec instead. `<interval>` and `<phase>` should be a decimal integer
+      #   with a unit suffix s, m, h, or d.
+      # - Optionally, the string can be preceded by `CRON_TZ=<timezone name>` or
+      #   `TZ=<timezone name>`, which will get copied to {@link timezone}.
+ # (In which case the {@link timezone} field should be left empty.)
+ # - Optionally, "#" followed by a comment can appear at the end of the string.
+ # - Note that the special case that some cron implementations have for
+ # treating day_of_month and day_of_week as "or" instead of "and" when both
+ # are set is not implemented.
+ #
+ # @return [Array]
+ attr_reader :cron_expressions
+
+ # If set, any timestamps before start_time will be skipped.
+ attr_reader :start_time
+
+ # If set, any timestamps after end_time will be skipped.
+ attr_reader :end_time
+
+ # If set, the schedule will be randomly offset by up to this many seconds.
+ attr_reader :jitter
+
+ # Time zone to interpret all calendar-based specs in.
+ #
+ # If unset, defaults to UTC. We recommend using UTC for your application if
+ # at all possible, to avoid various surprising properties of time zones.
+ #
+ # Time zones may be provided by name, corresponding to names in the IANA
+ # time zone database (see https://www.iana.org/time-zones). The definition
+ # will be loaded by the Temporal server from the environment it runs in.
+ attr_reader :timezone_name
+
+ # @param cron_expressions [Array]
+ # @param intervals [Array]
+ # @param calendars [Array]
+ # @param start_time [Time] If set, any timestamps before start_time will be skipped.
+ # @param end_time [Time] If set, any timestamps after end_time will be skipped.
+ # @param jitter [Integer] If set, the schedule will be randomly offset by up to this many seconds.
+ # @param timezone_name [String] If set, the schedule will be interpreted in this time zone.
+ def initialize(cron_expressions: nil, intervals: nil, calendars: nil, start_time: nil, end_time: nil, jitter: nil, timezone_name: nil)
+ @cron_expressions = cron_expressions || []
+ @intervals = intervals || []
+ @calendars = calendars || []
+ @start_time = start_time
+ @end_time = end_time
+ @jitter = jitter
+ @timezone_name = timezone_name
+ end
+ end
+ end
+end
diff --git a/lib/temporal/schedule/schedule_state.rb b/lib/temporal/schedule/schedule_state.rb
new file mode 100644
index 00000000..4debb82c
--- /dev/null
+++ b/lib/temporal/schedule/schedule_state.rb
@@ -0,0 +1,18 @@
+module Temporal
+ module Schedule
+ class ScheduleState
+ attr_reader :notes, :paused, :limited_actions, :remaining_actions
+
+ # @param notes [String] Human-readable notes about the schedule.
+ # @param paused [Boolean] If true, do not take any actions based on the schedule spec.
+ # @param limited_actions [Boolean] If true, decrement remaining_actions when an action is taken.
+ # @param remaining_actions [Integer] The number of actions remaining to be taken.
+ def initialize(notes: nil, paused: nil, limited_actions: nil, remaining_actions: nil)
+ @notes = notes
+ @paused = paused
+ @limited_actions = limited_actions
+ @remaining_actions = remaining_actions
+ end
+ end
+ end
+end
diff --git a/lib/temporal/schedule/start_workflow_action.rb b/lib/temporal/schedule/start_workflow_action.rb
new file mode 100644
index 00000000..19348fcd
--- /dev/null
+++ b/lib/temporal/schedule/start_workflow_action.rb
@@ -0,0 +1,58 @@
+require "forwardable"
+
+module Temporal
+ module Schedule
+ class StartWorkflowAction
+ extend Forwardable
+
+      # Delegate these read-only execution attributes to @execution_options.
+ def_delegators(
+ :@execution_options,
+ :name,
+ :task_queue,
+ :headers,
+ :memo
+ )
+
+ attr_reader :workflow_id, :input
+
+ # @param workflow [Temporal::Workflow, String] workflow class or name. When a workflow class
+ # is passed, its config (namespace, task_queue, timeouts, etc) will be used
+ # @param input [any] arguments to be passed to workflow's #execute method
+ # @param args [Hash] keyword arguments to be passed to workflow's #execute method
+ # @param options [Hash, nil] optional overrides
+ # @option options [String] :workflow_id
+ # @option options [String] :name workflow name
+ # @option options [String] :namespace
+ # @option options [String] :task_queue
+ # @option options [Hash] :retry_policy check Temporal::RetryPolicy for available options
+ # @option options [Hash] :timeouts check Temporal::Configuration::DEFAULT_TIMEOUTS
+ # @option options [Hash] :headers
+ # @option options [Hash] :search_attributes
+ #
+      # The :workflow_id option defaults to a random UUID when not provided.
+ def initialize(workflow, *input, options: {})
+ @workflow_id = options[:workflow_id] || SecureRandom.uuid
+ @input = input
+
+ @execution_options = ExecutionOptions.new(workflow, options)
+ end
+
+ def execution_timeout
+ @execution_options.timeouts[:execution]
+ end
+
+ def run_timeout
+ @execution_options.timeouts[:run] || @execution_options.timeouts[:execution]
+ end
+
+ def task_timeout
+ @execution_options.timeouts[:task]
+ end
+
+ def search_attributes
+ Workflow::Context::Helpers.process_search_attributes(@execution_options.search_attributes)
+ end
+ end
+ end
+end
diff --git a/lib/temporal/scheduled_thread_pool.rb b/lib/temporal/scheduled_thread_pool.rb
new file mode 100644
index 00000000..5e9025af
--- /dev/null
+++ b/lib/temporal/scheduled_thread_pool.rb
@@ -0,0 +1,123 @@
+require 'temporal/metric_keys'
+
+# This class implements a thread pool for scheduling tasks with a delay.
+# If threads are all occupied when a task is scheduled, it will be queued
+# with the sleep delay adjusted based on the wait time.
+module Temporal
+ class ScheduledThreadPool
+ attr_reader :size
+
+ ScheduledItem = Struct.new(:id, :job, :fire_at, :canceled, keyword_init: true)
+
+ def initialize(size, config, metrics_tags)
+ @size = size
+ @metrics_tags = metrics_tags
+ @queue = Queue.new
+ @mutex = Mutex.new
+ @config = config
+ @available_threads = size
+ @occupied_threads = {}
+ @pool = Array.new(size) do |_i|
+ Thread.new { poll }
+ end
+ end
+
+ def schedule(id, delay, &block)
+ item = ScheduledItem.new(
+ id: id,
+ job: block,
+ fire_at: Time.now + delay,
+ canceled: false)
+ @mutex.synchronize do
+ @available_threads -= 1
+ @queue << item
+ end
+
+ report_metrics
+
+ item
+ end
+
+ def cancel(item)
+ thread = @mutex.synchronize do
+ @occupied_threads[item.id]
+ end
+
+ item.canceled = true
+ unless thread.nil?
+ thread.raise(CancelError.new)
+ end
+
+ item
+ end
+
+ def shutdown
+ size.times do
+ @mutex.synchronize do
+ @queue << EXIT_SYMBOL
+ end
+ end
+
+ @pool.each(&:join)
+ end
+
+ private
+
+ class CancelError < StandardError; end
+ EXIT_SYMBOL = :exit
+
+ def poll
+ Thread.current.abort_on_exception = true
+
+ loop do
+ item = @queue.pop
+ if item == EXIT_SYMBOL
+ return
+ end
+
+ begin
+ Thread.handle_interrupt(CancelError => :immediate) do
+ @mutex.synchronize do
+ @occupied_threads[item.id] = Thread.current
+ end
+
+ if !item.canceled
+ delay = item.fire_at - Time.now
+ if delay > 0
+ sleep delay
+ end
+ end
+ end
+
+ # Job call is outside cancel handle interrupt block because the job can't
+ # reliably be stopped once running. It's still in the begin/rescue block
+ # so that it won't be executed if the thread gets canceled.
+ if !item.canceled
+ begin
+ item.job.call
+ rescue StandardError => e
+ Temporal.logger.error('Error reached top of thread pool thread', { error: e.inspect })
+ Temporal::ErrorHandler.handle(e, @config)
+ rescue Exception => ex
+ Temporal.logger.error('Exception reached top of thread pool thread', { error: ex.inspect })
+ Temporal::ErrorHandler.handle(ex, @config)
+ raise
+ end
+ end
+ rescue CancelError
+ end
+
+ @mutex.synchronize do
+ @available_threads += 1
+ @occupied_threads.delete(item.id)
+ end
+
+ report_metrics
+ end
+ end
+
+ def report_metrics
+ Temporal.metrics.gauge(Temporal::MetricKeys::THREAD_POOL_AVAILABLE_THREADS, @available_threads, @metrics_tags)
+ end
+ end
+end
diff --git a/lib/temporal/testing/local_activity_context.rb b/lib/temporal/testing/local_activity_context.rb
index 4dec0479..b89fa44f 100644
--- a/lib/temporal/testing/local_activity_context.rb
+++ b/lib/temporal/testing/local_activity_context.rb
@@ -6,7 +6,7 @@ module Temporal
module Testing
class LocalActivityContext < Activity::Context
def initialize(metadata)
- super(nil, metadata)
+ super(nil, metadata, nil, nil)
end
def heartbeat(details = nil)
diff --git a/lib/temporal/testing/local_workflow_context.rb b/lib/temporal/testing/local_workflow_context.rb
index 3642a7d3..7d3321ae 100644
--- a/lib/temporal/testing/local_workflow_context.rb
+++ b/lib/temporal/testing/local_workflow_context.rb
@@ -5,11 +5,12 @@
require 'temporal/metadata/activity'
require 'temporal/workflow/future'
require 'temporal/workflow/history/event_target'
+require 'temporal/workflow/context_helpers'
module Temporal
module Testing
class LocalWorkflowContext
- attr_reader :metadata
+ attr_reader :metadata, :config
def initialize(execution, workflow_id, run_id, disabled_releases, metadata, config = Temporal.configuration)
@last_event_id = 0
@@ -57,9 +58,12 @@ def execute_activity(activity_class, *input, **args)
attempt: 1,
workflow_run_id: run_id,
workflow_id: workflow_id,
- workflow_name: nil, # not yet used, but will be in the future
+ workflow_name: self.metadata.name,
headers: execution_options.headers,
- heartbeat_details: nil
+ heartbeat_details: nil,
+ scheduled_at: Time.now,
+ current_attempt_scheduled_at: Time.now,
+ heartbeat_timeout: 0
)
context = LocalActivityContext.new(metadata)
@@ -105,9 +109,12 @@ def execute_local_activity(activity_class, *input, **args)
attempt: 1,
workflow_run_id: run_id,
workflow_id: workflow_id,
- workflow_name: nil, # not yet used, but will be in the future
+ workflow_name: self.metadata.name,
headers: execution_options.headers,
- heartbeat_details: nil
+ heartbeat_details: nil,
+ scheduled_at: Time.now,
+ current_attempt_scheduled_at: Time.now,
+ heartbeat_timeout: 0
)
context = LocalActivityContext.new(metadata)
@@ -126,8 +133,22 @@ def execute_workflow!(workflow_class, *input, **args)
workflow_id = SecureRandom.uuid
run_id = SecureRandom.uuid
execution_options = ExecutionOptions.new(workflow_class, options, config.default_execution_options)
+
+ child_metadata = Temporal::Metadata::Workflow.new(
+ namespace: execution_options.namespace,
+ id: workflow_id,
+ name: execution_options.name, # Workflow class name
+ run_id: run_id,
+ parent_id: @workflow_id,
+ parent_run_id: @run_id,
+ attempt: 1,
+ task_queue: execution_options.task_queue,
+ headers: execution_options.headers,
+ run_started_at: Time.now,
+ memo: {},
+ )
context = Temporal::Testing::LocalWorkflowContext.new(
- execution, workflow_id, run_id, workflow_class.disabled_releases, execution_options.headers
+ execution, workflow_id, run_id, workflow_class.disabled_releases, child_metadata
)
workflow_class.execute_in_context(context, input)
@@ -159,36 +180,67 @@ def fail(exception)
raise exception
end
+ def continue_as_new(*input, **args)
+ raise NotImplementedError, 'not yet available for testing'
+ end
+
def wait_for_all(*futures)
futures.each(&:wait)
return
end
- def wait_for(future)
- # Point of communication
- Fiber.yield while !future.finished?
+ def wait_for_any(*futures)
+ return if futures.empty?
+
+ Fiber.yield while futures.none?(&:finished?)
+
+ return
+ end
+
+ def wait_until(&unblock_condition)
+        raise 'You must pass an unblock condition block to wait_until' if unblock_condition.nil?
+
+ Fiber.yield until unblock_condition.call
+
+ return
end
def now
Time.now
end
- def on_signal(&block)
- raise NotImplementedError, 'not yet available for testing'
+ def on_signal(signal_name = nil, &block)
+ raise NotImplementedError, 'Signals are not available when Temporal::Testing.local! is on'
+ end
+
+ def on_query(query, &block)
+ raise NotImplementedError, 'Queries are not available when Temporal::Testing.local! is on'
end
def cancel_activity(activity_id)
- raise NotImplementedError, 'not yet available for testing'
+ raise NotImplementedError, 'Cancel is not available when Temporal::Testing.local! is on'
end
def cancel(target, cancelation_id)
- raise NotImplementedError, 'not yet available for testing'
+ raise NotImplementedError, 'Cancel is not available when Temporal::Testing.local! is on'
+ end
+
+ def signal_external_workflow(workflow, signal, workflow_id, run_id = nil, input = nil, namespace: nil, child_workflow_only: false)
+ raise NotImplementedError, 'Signals are not available when Temporal::Testing.local! is on'
+ end
+
+ def upsert_search_attributes(search_attributes)
+ search_attributes = Temporal::Workflow::Context::Helpers.process_search_attributes(search_attributes)
+ if search_attributes.empty?
+ raise ArgumentError, "Cannot upsert an empty hash for search_attributes, as this would do nothing."
+ end
+ execution.upsert_search_attributes(search_attributes)
end
private
- attr_reader :execution, :run_id, :workflow_id, :disabled_releases, :config
+ attr_reader :execution, :run_id, :workflow_id, :disabled_releases
def completed!
@completed = true
diff --git a/lib/temporal/testing/replay_tester.rb b/lib/temporal/testing/replay_tester.rb
new file mode 100644
index 00000000..6a98c86e
--- /dev/null
+++ b/lib/temporal/testing/replay_tester.rb
@@ -0,0 +1,73 @@
+require "gen/temporal/api/history/v1/message_pb"
+require "json"
+require "temporal/errors"
+require "temporal/metadata/workflow_task"
+require "temporal/middleware/chain"
+require "temporal/workflow/executor"
+require "temporal/workflow/stack_trace_tracker"
+
+module Temporal
+ module Testing
+ class ReplayError < StandardError
+ end
+
+ class ReplayTester
+ def initialize(config: Temporal.configuration)
+ @config = config
+ end
+
+ attr_reader :config
+
+ # Runs a replay test by using the specific Temporal::Workflow::History object. Instances of these objects
+ # can be obtained using various from_ methods in Temporal::Workflow::History::Serialization.
+ #
+ # If the replay test succeeds, the method will return silently. If the replay tests fails, an error will be raised.
+ def replay_history(workflow_class, history)
+ # This code roughly resembles the workflow TaskProcessor but with history being fed in rather
+ # than being pulled via a workflow task, no query support, no metrics, and other
+ # simplifications. Fake metadata needs to be provided.
+ start_workflow_event = history.find_event_by_id(1)
+ if start_workflow_event.nil? || start_workflow_event.type != "WORKFLOW_EXECUTION_STARTED"
+ raise ReplayError, "History does not start with workflow_execution_started event"
+ end
+
+ metadata = Temporal::Metadata::WorkflowTask.new(
+ namespace: config.namespace,
+ id: 1,
+ task_token: "",
+ attempt: 1,
+ workflow_run_id: "run_id",
+ workflow_id: "workflow_id",
+ # Protobuf deserialization will ensure this tree is present
+ workflow_name: start_workflow_event.attributes.workflow_type.name
+ )
+
+ executor = Workflow::Executor.new(
+ workflow_class,
+ history,
+ metadata,
+ config,
+ true,
+ Middleware::Chain.new([])
+ )
+
+ begin
+ executor.run
+ rescue StandardError
+ query = Struct.new(:query_type, :query_args).new(
+ Temporal::Workflow::StackTraceTracker::STACK_TRACE_QUERY_NAME,
+ nil
+ )
+ query_result = executor.process_queries(
+ {"stack_trace" => query}
+ )
+ replay_error = ReplayError.new("Workflow code failed to replay successfully against history")
+        # Override the stack trace to the point in the workflow code where the failure occurred, not the
+ # point in the StateManager where non-determinism is detected
+ replay_error.set_backtrace("Fiber backtraces: #{query_result["stack_trace"].result}")
+ raise replay_error
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/testing/temporal_override.rb b/lib/temporal/testing/temporal_override.rb
index de61b591..c67515e4 100644
--- a/lib/temporal/testing/temporal_override.rb
+++ b/lib/temporal/testing/temporal_override.rb
@@ -1,6 +1,8 @@
require 'securerandom'
require 'temporal/activity/async_token'
+require 'temporal/workflow/context_helpers'
require 'temporal/workflow/execution_info'
+require 'temporal/workflow/status'
require 'temporal/testing/workflow_execution'
require 'temporal/testing/local_workflow_context'
@@ -31,7 +33,6 @@ def schedule_workflow(workflow, cron_schedule, *input, **args)
def fetch_workflow_execution_info(_namespace, workflow_id, run_id)
return super if Temporal::Testing.disabled?
-
execution = executions[[workflow_id, run_id]]
Workflow::ExecutionInfo.new(
@@ -42,6 +43,7 @@ def fetch_workflow_execution_info(_namespace, workflow_id, run_id)
close_time: nil,
status: execution.status,
history_length: nil,
+ search_attributes: execution.search_attributes,
).freeze
end
@@ -73,9 +75,16 @@ def start_locally(workflow, schedule, *input, **args)
options = args.delete(:options) || {}
input << args unless args.empty?
+ # signals aren't supported at all, so let's prohibit start_workflow calls that try to signal
+ signal_name = options.delete(:signal_name)
+ signal_input = options.delete(:signal_input)
+ raise NotImplementedError, 'Signals are not available when Temporal::Testing.local! is on' if signal_name || signal_input
+
reuse_policy = options[:workflow_id_reuse_policy] || :allow_failed
workflow_id = options[:workflow_id] || SecureRandom.uuid
run_id = SecureRandom.uuid
+ memo = options[:memo] || {}
+ initial_search_attributes = Workflow::Context::Helpers.process_search_attributes(options[:search_attributes] || {})
if !allowed?(workflow_id, reuse_policy)
raise Temporal::WorkflowExecutionAlreadyStartedFailure.new(
@@ -84,12 +93,22 @@ def start_locally(workflow, schedule, *input, **args)
)
end
- execution = WorkflowExecution.new
+ execution = WorkflowExecution.new(initial_search_attributes: initial_search_attributes)
executions[[workflow_id, run_id]] = execution
execution_options = ExecutionOptions.new(workflow, options)
metadata = Metadata::Workflow.new(
- name: workflow_id, run_id: run_id, attempt: 1, headers: execution_options.headers
+ namespace: execution_options.namespace,
+ id: workflow_id,
+ name: execution_options.name,
+ run_id: run_id,
+ parent_id: nil,
+ parent_run_id: nil,
+ attempt: 1,
+ task_queue: execution_options.task_queue,
+ run_started_at: Time.now,
+ memo: memo,
+ headers: execution_options.headers
)
context = Temporal::Testing::LocalWorkflowContext.new(
execution, workflow_id, run_id, workflow.disabled_releases, metadata
@@ -133,14 +152,14 @@ def previous_run_id(workflow_id)
def disallowed_statuses_for(reuse_policy)
case reuse_policy
when :allow_failed
- [Workflow::ExecutionInfo::RUNNING_STATUS, Workflow::ExecutionInfo::COMPLETED_STATUS]
+ [Workflow::Status::RUNNING, Workflow::Status::COMPLETED]
when :allow
- [Workflow::ExecutionInfo::RUNNING_STATUS]
+ [Workflow::Status::RUNNING]
when :reject
[
- Workflow::ExecutionInfo::RUNNING_STATUS,
- Workflow::ExecutionInfo::FAILED_STATUS,
- Workflow::ExecutionInfo::COMPLETED_STATUS
+ Workflow::Status::RUNNING,
+ Workflow::Status::FAILED,
+ Workflow::Status::COMPLETED
]
end
end
diff --git a/lib/temporal/testing/workflow_execution.rb b/lib/temporal/testing/workflow_execution.rb
index ada7d0e8..6ddbb18e 100644
--- a/lib/temporal/testing/workflow_execution.rb
+++ b/lib/temporal/testing/workflow_execution.rb
@@ -1,13 +1,15 @@
require 'temporal/testing/future_registry'
+require 'temporal/workflow/status'
module Temporal
module Testing
class WorkflowExecution
- attr_reader :status
+ attr_reader :status, :search_attributes
- def initialize
- @status = Workflow::ExecutionInfo::RUNNING_STATUS
+ def initialize(initial_search_attributes: {})
+ @status = Workflow::Status::RUNNING
@futures = FutureRegistry.new
+ @search_attributes = initial_search_attributes
end
def run(&block)
@@ -17,9 +19,9 @@ def run(&block)
def resume
fiber.resume
- @status = Workflow::ExecutionInfo::COMPLETED_STATUS unless fiber.alive?
+ @status = Workflow::Status::COMPLETED unless fiber.alive?
rescue StandardError
- @status = Workflow::ExecutionInfo::FAILED_STATUS
+ @status = Workflow::Status::FAILED
end
def register_future(token, future)
@@ -36,6 +38,10 @@ def fail_activity(token, exception)
resume
end
+ def upsert_search_attributes(search_attributes)
+ @search_attributes.merge!(search_attributes)
+ end
+
private
attr_reader :fiber, :futures
diff --git a/lib/temporal/testing/workflow_override.rb b/lib/temporal/testing/workflow_override.rb
index a36e843e..45e989d4 100644
--- a/lib/temporal/testing/workflow_override.rb
+++ b/lib/temporal/testing/workflow_override.rb
@@ -28,7 +28,17 @@ def execute_locally(*input)
run_id = SecureRandom.uuid
execution = WorkflowExecution.new
metadata = Temporal::Metadata::Workflow.new(
- name: workflow_id, run_id: run_id, attempt: 1
+ namespace: nil,
+ id: workflow_id,
+ name: name, # Workflow class name
+ run_id: run_id,
+ parent_id: nil,
+ parent_run_id: nil,
+ attempt: 1,
+ task_queue: 'unit-test-task-queue',
+ headers: {},
+ run_started_at: Time.now,
+ memo: {},
)
context = Temporal::Testing::LocalWorkflowContext.new(
execution, workflow_id, run_id, disabled_releases, metadata
diff --git a/lib/temporal/thread_pool.rb b/lib/temporal/thread_pool.rb
index 487686fb..3febbf82 100644
--- a/lib/temporal/thread_pool.rb
+++ b/lib/temporal/thread_pool.rb
@@ -1,3 +1,5 @@
+require 'temporal/metric_keys'
+
# This class implements a very simple ThreadPool with the ability to
# block until at least one thread becomes available. This allows Pollers
# to only poll when there's an available thread in the pool.
@@ -9,22 +11,26 @@ module Temporal
class ThreadPool
attr_reader :size
- def initialize(size)
+ def initialize(size, config, metrics_tags)
@size = size
+ @metrics_tags = metrics_tags
@queue = Queue.new
@mutex = Mutex.new
+ @config = config
@availability = ConditionVariable.new
@available_threads = size
- @pool = Array.new(size) do |i|
+ @pool = Array.new(size) do |_i|
Thread.new { poll }
end
end
+ def report_metrics
+ Temporal.metrics.gauge(Temporal::MetricKeys::THREAD_POOL_AVAILABLE_THREADS, @available_threads, @metrics_tags)
+ end
+
def wait_for_available_threads
@mutex.synchronize do
- while @available_threads <= 0
- @availability.wait(@mutex)
- end
+ @availability.wait(@mutex) while @available_threads <= 0
end
end
@@ -33,6 +39,8 @@ def schedule(&block)
@available_threads -= 1
@queue << block
end
+
+ report_metrics
end
def shutdown
@@ -48,14 +56,27 @@ def shutdown
EXIT_SYMBOL = :exit
def poll
+ Thread.current.abort_on_exception = true
+
catch(EXIT_SYMBOL) do
loop do
job = @queue.pop
- job.call
+ begin
+ job.call
+ rescue StandardError => e
+ Temporal.logger.error('Error reached top of thread pool thread', { error: e.inspect })
+ Temporal::ErrorHandler.handle(e, @config)
+ rescue Exception => ex
+ Temporal.logger.error('Exception reached top of thread pool thread', { error: ex.inspect })
+ Temporal::ErrorHandler.handle(ex, @config)
+ raise
+ end
@mutex.synchronize do
@available_threads += 1
@availability.signal
end
+
+ report_metrics
end
end
end
diff --git a/lib/temporal/version.rb b/lib/temporal/version.rb
index baa4f079..eb368292 100644
--- a/lib/temporal/version.rb
+++ b/lib/temporal/version.rb
@@ -1,3 +1,3 @@
module Temporal
- VERSION = '0.0.1'.freeze
+ VERSION = '0.1.1'.freeze
end
diff --git a/lib/temporal/worker.rb b/lib/temporal/worker.rb
index 81881b67..e9a3b2f3 100644
--- a/lib/temporal/worker.rb
+++ b/lib/temporal/worker.rb
@@ -1,3 +1,4 @@
+require 'temporal/errors'
require 'temporal/workflow/poller'
require 'temporal/activity/poller'
require 'temporal/execution_options'
@@ -8,75 +9,151 @@ module Temporal
class Worker
# activity_thread_pool_size: number of threads that the poller can use to run activities.
+  # can be set to 1 if you want no parallelism in your activities, at the cost of throughput.
+ #
+ # binary_checksum: The binary checksum identifies the version of workflow worker code. It is set on each completed or failed workflow
+ # task. It is present in API responses that return workflow execution info, and is shown in temporal-web and tctl.
+ # It is traditionally a checksum of the application binary. However, Temporal server treats this as an opaque
+ # identifier and it does not have to be a "checksum". Typical values for a Ruby application might include the hash
+ # of the latest git commit or a semantic version number.
+ #
+ # It can be used to reset workflow history to before a "bad binary" was deployed. Bad checksum values can also
+ # be marked at the namespace level. This will cause Temporal server to reject any polling for workflow tasks
+ # from workers with these bad versions.
+ #
+ # See https://docs.temporal.io/docs/tctl/how-to-use-tctl/#recovery-from-bad-deployment----auto-reset-workflow
+ #
+ # activity_max_tasks_per_second: Optional: Sets the rate limiting on number of activities that can be executed per second
+ #
+ # This limits new activities being started and activity attempts being scheduled. It does NOT
+ # limit the number of concurrent activities being executed on this task queue.
+ #
+ # This is managed by the server and controls activities per second for the entire task queue
+ # across all the workers. Notice that the number is represented in double, so that you can set
+ # it to less than 1 if needed. For example, set the number to 0.1 means you want your activity
+  # to be executed once every 10 seconds. This can be used to protect downstream services from
+ # flooding. The zero value of this uses the default value. Default is unlimited.
def initialize(
config = Temporal.configuration,
activity_thread_pool_size: Temporal::Activity::Poller::DEFAULT_OPTIONS[:thread_pool_size],
- workflow_thread_pool_size: Temporal::Workflow::Poller::DEFAULT_OPTIONS[:thread_pool_size]
+ workflow_thread_pool_size: Temporal::Workflow::Poller::DEFAULT_OPTIONS[:thread_pool_size],
+ binary_checksum: Temporal::Workflow::Poller::DEFAULT_OPTIONS[:binary_checksum],
+ activity_poll_retry_seconds: Temporal::Activity::Poller::DEFAULT_OPTIONS[:poll_retry_seconds],
+ workflow_poll_retry_seconds: Temporal::Workflow::Poller::DEFAULT_OPTIONS[:poll_retry_seconds],
+ activity_max_tasks_per_second: Temporal::Activity::Poller::DEFAULT_OPTIONS[:max_tasks_per_second]
)
@config = config
@workflows = Hash.new { |hash, key| hash[key] = ExecutableLookup.new }
@activities = Hash.new { |hash, key| hash[key] = ExecutableLookup.new }
@pollers = []
@workflow_task_middleware = []
+ @workflow_middleware = []
@activity_middleware = []
@shutting_down = false
@activity_poller_options = {
thread_pool_size: activity_thread_pool_size,
+ poll_retry_seconds: activity_poll_retry_seconds,
+ max_tasks_per_second: activity_max_tasks_per_second
}
@workflow_poller_options = {
thread_pool_size: workflow_thread_pool_size,
+ binary_checksum: binary_checksum,
+ poll_retry_seconds: workflow_poll_retry_seconds
}
+ @start_stop_mutex = Mutex.new
end
def register_workflow(workflow_class, options = {})
- execution_options = ExecutionOptions.new(workflow_class, options, config.default_execution_options)
- key = [execution_options.namespace, execution_options.task_queue]
+ namespace_and_task_queue, execution_options = executable_registration(workflow_class, options)
- @workflows[key].add(execution_options.name, workflow_class)
+ @workflows[namespace_and_task_queue].add(execution_options.name, workflow_class)
+ end
+
+ # Register one special workflow that you want to intercept any unknown workflows,
+ # perhaps so you can delegate work to other classes, somewhat analogous to ruby's method_missing.
+ # Only one dynamic Workflow may be registered per task queue.
+ # Within Workflow#execute, you may retrieve the name of the unknown class via workflow.name.
+ def register_dynamic_workflow(workflow_class, options = {})
+ namespace_and_task_queue, execution_options = executable_registration(workflow_class, options)
+
+ begin
+ @workflows[namespace_and_task_queue].add_dynamic(execution_options.name, workflow_class)
+ rescue Temporal::ExecutableLookup::SecondDynamicExecutableError => e
+ raise Temporal::SecondDynamicWorkflowError,
+ "Temporal::Worker#register_dynamic_workflow: cannot register #{execution_options.name} "\
+ "dynamically; #{e.previous_executable_name} was already registered dynamically for task queue "\
+ "'#{execution_options.task_queue}', and there can be only one."
+ end
end
def register_activity(activity_class, options = {})
- execution_options = ExecutionOptions.new(activity_class, options, config.default_execution_options)
- key = [execution_options.namespace, execution_options.task_queue]
+ namespace_and_task_queue, execution_options = executable_registration(activity_class, options)
+ @activities[namespace_and_task_queue].add(execution_options.name, activity_class)
+ end
- @activities[key].add(execution_options.name, activity_class)
+ # Register one special activity that you want to intercept any unknown activities,
+ # perhaps so you can delegate work to other classes, somewhat analogous to ruby's method_missing.
+ # Only one dynamic Activity may be registered per task queue.
+ # Within Activity#execute, you may retrieve the name of the unknown class via activity.name.
+ def register_dynamic_activity(activity_class, options = {})
+ namespace_and_task_queue, execution_options = executable_registration(activity_class, options)
+ begin
+ @activities[namespace_and_task_queue].add_dynamic(execution_options.name, activity_class)
+ rescue Temporal::ExecutableLookup::SecondDynamicExecutableError => e
+ raise Temporal::SecondDynamicActivityError,
+ "Temporal::Worker#register_dynamic_activity: cannot register #{execution_options.name} "\
+ "dynamically; #{e.previous_executable_name} was already registered dynamically for task queue "\
+ "'#{execution_options.task_queue}', and there can be only one."
+ end
end
def add_workflow_task_middleware(middleware_class, *args)
@workflow_task_middleware << Middleware::Entry.new(middleware_class, args)
end
+ def add_workflow_middleware(middleware_class, *args)
+ @workflow_middleware << Middleware::Entry.new(middleware_class, args)
+ end
+
def add_activity_middleware(middleware_class, *args)
@activity_middleware << Middleware::Entry.new(middleware_class, args)
end
def start
- workflows.each_pair do |(namespace, task_queue), lookup|
- pollers << workflow_poller_for(namespace, task_queue, lookup)
- end
+ @start_stop_mutex.synchronize do
+ return if shutting_down? # Handle the case where stop method grabbed the mutex first
- activities.each_pair do |(namespace, task_queue), lookup|
- pollers << activity_poller_for(namespace, task_queue, lookup)
- end
+ trap_signals
- trap_signals
+ workflows.each_pair do |(namespace, task_queue), lookup|
+ pollers << workflow_poller_for(namespace, task_queue, lookup)
+ end
- pollers.each(&:start)
+ activities.each_pair do |(namespace, task_queue), lookup|
+ pollers << activity_poller_for(namespace, task_queue, lookup)
+ end
+
+ pollers.each(&:start)
+ end
+ on_started_hook
# keep the main thread alive
- sleep 1 while !shutting_down?
+ sleep 1 until shutting_down?
end
def stop
@shutting_down = true
Thread.new do
- pollers.each(&:stop_polling)
- # allow workers to drain in-transit tasks.
- # https://github.com/temporalio/temporal/issues/1058
- sleep 1
- pollers.each(&:cancel_pending_requests)
- pollers.each(&:wait)
+ @start_stop_mutex.synchronize do
+ pollers.each(&:stop_polling)
+ while_stopping_hook
+ # allow workers to drain in-transit tasks.
+ # https://github.com/temporalio/temporal/issues/1058
+ sleep 1
+ pollers.each(&:cancel_pending_requests)
+ pollers.each(&:wait)
+ end
+ on_stopped_hook
end.join
end
@@ -84,20 +161,31 @@ def stop
attr_reader :config, :activity_poller_options, :workflow_poller_options,
:activities, :workflows, :pollers,
- :workflow_task_middleware, :activity_middleware
+ :workflow_task_middleware, :workflow_middleware, :activity_middleware
def shutting_down?
@shutting_down
end
+ def on_started_hook; end
+ def while_stopping_hook; end
+ def on_stopped_hook; end
+
def workflow_poller_for(namespace, task_queue, lookup)
- Workflow::Poller.new(namespace, task_queue, lookup.freeze, config, workflow_task_middleware, workflow_poller_options)
+ Workflow::Poller.new(namespace, task_queue, lookup.freeze, config, workflow_task_middleware, workflow_middleware,
+ workflow_poller_options)
end
def activity_poller_for(namespace, task_queue, lookup)
Activity::Poller.new(namespace, task_queue, lookup.freeze, config, activity_middleware, activity_poller_options)
end
+ def executable_registration(executable_class, options)
+ execution_options = ExecutionOptions.new(executable_class, options, config.default_execution_options)
+ key = [execution_options.namespace, execution_options.task_queue]
+ [key, execution_options]
+ end
+
def trap_signals
%w[TERM INT].each do |signal|
Signal.trap(signal) { stop }
diff --git a/lib/temporal/workflow.rb b/lib/temporal/workflow.rb
index 06bf2b80..c135a19c 100644
--- a/lib/temporal/workflow.rb
+++ b/lib/temporal/workflow.rb
@@ -1,3 +1,4 @@
+require 'temporal/callable'
require 'temporal/concerns/executable'
require 'temporal/workflow/convenience_methods'
require 'temporal/thread_local_context'
@@ -13,14 +14,16 @@ def self.execute_in_context(context, input)
Temporal::ThreadLocalContext.set(context)
workflow = new(context)
- result = workflow.execute(*input)
+ callable = Temporal::Callable.new(method: workflow.method(:execute))
+
+ result = callable.call(input)
context.complete(result) unless context.completed?
rescue StandardError, ScriptError => error
Temporal.logger.error("Workflow execution failed", context.metadata.to_h.merge(error: error.inspect))
Temporal.logger.debug(error.backtrace.join("\n"))
- Temporal::ErrorHandler.handle(error, metadata: context.metadata)
+ Temporal::ErrorHandler.handle(error, context.config, metadata: context.metadata)
context.fail(error)
ensure
diff --git a/lib/temporal/workflow/child_workflow_future.rb b/lib/temporal/workflow/child_workflow_future.rb
new file mode 100644
index 00000000..3e56a835
--- /dev/null
+++ b/lib/temporal/workflow/child_workflow_future.rb
@@ -0,0 +1,18 @@
+require 'fiber'
+require 'temporal/workflow/future'
+
+module Temporal
+ class Workflow
+ # A future that represents a child workflow execution
+ class ChildWorkflowFuture < Future
+ attr_reader :child_workflow_execution_future
+
+ def initialize(target, context, cancelation_id: nil)
+ super
+
+ # create a future which will keep track of when the child workflow starts
+ @child_workflow_execution_future = Future.new(target, context, cancelation_id: cancelation_id)
+ end
+ end
+ end
+end
diff --git a/lib/temporal/workflow/command.rb b/lib/temporal/workflow/command.rb
index 0f0d6ed9..9d33aaea 100644
--- a/lib/temporal/workflow/command.rb
+++ b/lib/temporal/workflow/command.rb
@@ -2,15 +2,17 @@ module Temporal
class Workflow
module Command
# TODO: Move these classes into their own directories under workflow/command/*
- ScheduleActivity = Struct.new(:activity_type, :activity_id, :input, :namespace, :task_queue, :retry_policy, :timeouts, :headers, keyword_init: true)
- StartChildWorkflow = Struct.new(:workflow_type, :workflow_id, :input, :namespace, :task_queue, :retry_policy, :timeouts, :headers, keyword_init: true)
- ContinueAsNew = Struct.new(:workflow_type, :task_queue, :input, :timeouts, :retry_policy, :headers, keyword_init: true)
+ ScheduleActivity = Struct.new(:activity_type, :activity_id, :input, :task_queue, :retry_policy, :timeouts, :headers, keyword_init: true)
+ StartChildWorkflow = Struct.new(:workflow_type, :workflow_id, :input, :namespace, :task_queue, :retry_policy, :parent_close_policy, :timeouts, :headers, :cron_schedule, :memo, :workflow_id_reuse_policy, :search_attributes, keyword_init: true)
+ ContinueAsNew = Struct.new(:workflow_type, :task_queue, :input, :timeouts, :retry_policy, :headers, :memo, :search_attributes, keyword_init: true)
RequestActivityCancellation = Struct.new(:activity_id, keyword_init: true)
RecordMarker = Struct.new(:name, :details, keyword_init: true)
StartTimer = Struct.new(:timeout, :timer_id, keyword_init: true)
CancelTimer = Struct.new(:timer_id, keyword_init: true)
CompleteWorkflow = Struct.new(:result, keyword_init: true)
FailWorkflow = Struct.new(:exception, keyword_init: true)
+ SignalExternalWorkflow = Struct.new(:namespace, :execution, :signal_name, :input, :child_workflow_only, keyword_init: true)
+ UpsertSearchAttributes = Struct.new(:search_attributes, keyword_init: true)
# only these commands are supported right now
SCHEDULE_ACTIVITY_TYPE = :schedule_activity
@@ -21,6 +23,8 @@ module Command
CANCEL_TIMER_TYPE = :cancel_timer
COMPLETE_WORKFLOW_TYPE = :complete_workflow
FAIL_WORKFLOW_TYPE = :fail_workflow
+ SIGNAL_EXTERNAL_WORKFLOW_TYPE = :signal_external_workflow
+ UPSERT_SEARCH_ATTRIBUTES_TYPE = :upsert_search_attributes
COMMAND_CLASS_MAP = {
SCHEDULE_ACTIVITY_TYPE => ScheduleActivity,
@@ -30,7 +34,9 @@ module Command
START_TIMER_TYPE => StartTimer,
CANCEL_TIMER_TYPE => CancelTimer,
COMPLETE_WORKFLOW_TYPE => CompleteWorkflow,
- FAIL_WORKFLOW_TYPE => FailWorkflow
+ FAIL_WORKFLOW_TYPE => FailWorkflow,
+ SIGNAL_EXTERNAL_WORKFLOW_TYPE => SignalExternalWorkflow,
+ UPSERT_SEARCH_ATTRIBUTES_TYPE => UpsertSearchAttributes,
}.freeze
def self.generate(type, **args)
diff --git a/lib/temporal/workflow/command_state_machine.rb b/lib/temporal/workflow/command_state_machine.rb
index 09366cad..69bb2528 100644
--- a/lib/temporal/workflow/command_state_machine.rb
+++ b/lib/temporal/workflow/command_state_machine.rb
@@ -9,6 +9,7 @@ class CommandStateMachine
CANCELED_STATE = :canceled
FAILED_STATE = :failed
TIMED_OUT_STATE = :timed_out
+ TERMINATED_STATE = :terminated
attr_reader :state
@@ -36,6 +37,10 @@ def cancel
@state = CANCELED_STATE
end
+ def terminated
+ @state = TERMINATED_STATE
+ end
+
def fail
@state = FAILED_STATE
end
@@ -43,6 +48,14 @@ def fail
def time_out
@state = TIMED_OUT_STATE
end
+
+ def closed?
+ @state == COMPLETED_STATE ||
+ @state == CANCELED_STATE ||
+ @state == FAILED_STATE ||
+ @state == TIMED_OUT_STATE ||
+ @state == TERMINATED_STATE
+ end
end
end
end
diff --git a/lib/temporal/workflow/context.rb b/lib/temporal/workflow/context.rb
index d4541840..07b917a2 100644
--- a/lib/temporal/workflow/context.rb
+++ b/lib/temporal/workflow/context.rb
@@ -1,13 +1,18 @@
require 'securerandom'
+require 'temporal/activity/context'
require 'temporal/execution_options'
require 'temporal/errors'
require 'temporal/thread_local_context'
require 'temporal/workflow/history/event_target'
require 'temporal/workflow/command'
+require 'temporal/workflow/context_helpers'
require 'temporal/workflow/future'
+require 'temporal/workflow/child_workflow_future'
require 'temporal/workflow/replay_aware_logger'
+require 'temporal/workflow/stack_trace_tracker'
require 'temporal/workflow/state_manager'
+require 'temporal/workflow/signal'
# This context class is available in the workflow implementation
# and provides context and methods for interacting with Temporal
@@ -15,15 +20,26 @@
module Temporal
class Workflow
class Context
- attr_reader :metadata
+ attr_reader :metadata, :config
- def initialize(state_manager, dispatcher, workflow_class, metadata, config)
+ def initialize(state_manager, dispatcher, workflow_class, metadata, config, query_registry, track_stack_trace)
@state_manager = state_manager
@dispatcher = dispatcher
+ @query_registry = query_registry
@workflow_class = workflow_class
@metadata = metadata
@completed = false
@config = config
+
+ if track_stack_trace
+ @stack_trace_tracker = StackTraceTracker.new
+ else
+ @stack_trace_tracker = nil
+ end
+
+ query_registry.register(StackTraceTracker::STACK_TRACE_QUERY_NAME) do
+ stack_trace_tracker&.to_s
+ end
end
def completed?
@@ -31,8 +47,10 @@ def completed?
end
def logger
- @logger ||= ReplayAwareLogger.new(Temporal.logger)
- @logger.replay = state_manager.replay?
+ @logger ||= ReplayAwareLogger.new(
+ @config.logger,
+ replaying: -> { state_manager.replay? && !@config.log_on_workflow_replay }
+ )
@logger
end
@@ -40,10 +58,23 @@ def headers
metadata.headers
end
+ # Retrieves a hash of all current search attributes on this workflow run. Attributes
+ # can be set in a workflow by calling upsert_search_attributes or when starting a
+ # workflow by specifying the search_attributes option.
+ def search_attributes
+ state_manager.search_attributes
+ end
+
def has_release?(release_name)
state_manager.release?(release_name.to_s)
end
+ # Returns information about the workflow run's history up to this point. This can be used to
+ # determine when to continue as new.
+ def history_size
+ state_manager.history_size
+ end
+
def execute_activity(activity_class, *input, **args)
options = args.delete(:options) || {}
input << args unless args.empty?
@@ -54,11 +85,10 @@ def execute_activity(activity_class, *input, **args)
activity_id: options[:activity_id],
activity_type: execution_options.name,
input: input,
- namespace: execution_options.namespace,
task_queue: execution_options.task_queue,
retry_policy: execution_options.retry_policy,
timeouts: execution_options.timeouts,
- headers: execution_options.headers
+ headers: config.header_propagator_chain.inject(execution_options.headers)
)
target, cancelation_id = schedule_command(command)
@@ -92,7 +122,7 @@ def execute_local_activity(activity_class, *input, **args)
side_effect do
# TODO: this probably requires a local context implementation
- context = Activity::Context.new(nil, nil)
+ context = Activity::Context.new(nil, nil, nil, nil)
activity_class.execute_in_context(context, input)
end
end
@@ -101,6 +131,9 @@ def execute_workflow(workflow_class, *input, **args)
options = args.delete(:options) || {}
input << args unless args.empty?
+ parent_close_policy = options.delete(:parent_close_policy)
+ cron_schedule = options.delete(:cron_schedule)
+ workflow_id_reuse_policy = options.delete(:workflow_id_reuse_policy)
execution_options = ExecutionOptions.new(workflow_class, options, config.default_execution_options)
command = Command::StartChildWorkflow.new(
@@ -110,24 +143,42 @@ def execute_workflow(workflow_class, *input, **args)
namespace: execution_options.namespace,
task_queue: execution_options.task_queue,
retry_policy: execution_options.retry_policy,
+ parent_close_policy: parent_close_policy,
timeouts: execution_options.timeouts,
- headers: execution_options.headers
+ headers: config.header_propagator_chain.inject(execution_options.headers),
+ cron_schedule: cron_schedule,
+ memo: execution_options.memo,
+ workflow_id_reuse_policy: workflow_id_reuse_policy,
+ search_attributes: Helpers.process_search_attributes(execution_options.search_attributes),
)
target, cancelation_id = schedule_command(command)
- future = Future.new(target, self, cancelation_id: cancelation_id)
+
+ child_workflow_future = ChildWorkflowFuture.new(target, self, cancelation_id: cancelation_id)
dispatcher.register_handler(target, 'completed') do |result|
- future.set(result)
- future.success_callbacks.each { |callback| call_in_fiber(callback, result) }
+ child_workflow_future.set(result)
+ child_workflow_future.success_callbacks.each { |callback| call_in_fiber(callback, result) }
end
dispatcher.register_handler(target, 'failed') do |exception|
- future.fail(exception)
- future.failure_callbacks.each { |callback| call_in_fiber(callback, exception) }
+ # if the child workflow didn't start already then also fail that future
+ unless child_workflow_future.child_workflow_execution_future.ready?
+ child_workflow_future.child_workflow_execution_future.fail(exception)
+ child_workflow_future.child_workflow_execution_future.failure_callbacks.each { |callback| call_in_fiber(callback, exception) }
+ end
+
+ child_workflow_future.fail(exception)
+ child_workflow_future.failure_callbacks.each { |callback| call_in_fiber(callback, exception) }
end
- future
+ dispatcher.register_handler(target, 'started') do |event|
+ # once the workflow starts, complete the child workflow execution future
+ child_workflow_future.child_workflow_execution_future.set(event)
+ child_workflow_future.child_workflow_execution_future.success_callbacks.each { |callback| call_in_fiber(callback, event) }
+ end
+
+ child_workflow_future
end
def execute_workflow!(workflow_class, *input, **args)
@@ -139,6 +190,11 @@ def execute_workflow!(workflow_class, *input, **args)
result
end
+ def schedule_workflow(workflow_class, cron_schedule, *input, **args)
+ args[:options] = (args[:options] || {}).merge(cron_schedule: cron_schedule)
+ execute_workflow(workflow_class, *input, **args)
+ end
+
def side_effect(&block)
marker = state_manager.next_side_effect
return marker.last if marker
@@ -172,6 +228,10 @@ def start_timer(timeout, timer_id = nil)
future
end
+ def name
+ @metadata.name
+ end
+
def cancel_timer(timer_id)
command = Command::CancelTimer.new(timer_id: timer_id)
schedule_command(command)
@@ -195,6 +255,13 @@ def continue_as_new(*input, **args)
options = args.delete(:options) || {}
input << args unless args.empty?
+ # If memo or headers are not overridden, use those from the current run
+ options_from_metadata = {
+ memo: metadata.memo,
+ headers: metadata.headers,
+ }
+ options = options_from_metadata.merge(options)
+
execution_options = ExecutionOptions.new(workflow_class, options, config.default_execution_options)
command = Command::ContinueAsNew.new(
@@ -203,26 +270,71 @@ def continue_as_new(*input, **args)
input: input,
timeouts: execution_options.timeouts,
retry_policy: execution_options.retry_policy,
- headers: execution_options.headers
+ headers: config.header_propagator_chain.inject(execution_options.headers),
+ memo: execution_options.memo,
+ search_attributes: Helpers.process_search_attributes(execution_options.search_attributes)
)
schedule_command(command)
completed!
end
+ # Block workflow progress until all futures finish
def wait_for_all(*futures)
futures.each(&:wait)
return
end
- def wait_for(future)
+ # Block workflow progress until one of the futures completes. Passing
+ # in an empty array will immediately unblock.
+ def wait_for_any(*futures)
+ return if futures.empty? || futures.any?(&:finished?)
+
+ fiber = Fiber.current
+
+ handlers = futures.map do |future|
+ dispatcher.register_handler(future.target, Dispatcher::WILDCARD) do
+ fiber.resume if future.finished?
+ end
+ end
+
+ stack_trace_tracker&.record
+ begin
+ Fiber.yield
+ ensure
+ stack_trace_tracker&.clear
+ handlers.each(&:unregister)
+ end
+
+ return
+ end
+
+ # Block workflow progress until the specified block evaluates to true.
+ def wait_until(&unblock_condition)
+ raise 'You must pass a block to wait_until' if unblock_condition.nil?
+
+ return if unblock_condition.call
+
fiber = Fiber.current
- dispatcher.register_handler(future.target, Dispatcher::WILDCARD) do
- fiber.resume if future.finished?
+ # wait_until condition blocks often read state modified by target-specific handlers like
+ # signal handlers or callbacks for timer or activity completion. Running the wait_until
+ # handlers after the other handlers ensures that state is correctly updated before being
+ # read.
+ handler = dispatcher.register_handler(
+ Dispatcher::WILDCARD, # any target
+ Dispatcher::WILDCARD, # any event type
+ Dispatcher::Order::AT_END) do
+ fiber.resume if unblock_condition.call
end
- Fiber.yield
+ stack_trace_tracker&.record
+ begin
+ Fiber.yield
+ ensure
+ stack_trace_tracker&.clear
+ handler.unregister
+ end
return
end
@@ -231,12 +343,47 @@ def now
state_manager.local_time
end
- def on_signal(&block)
- target = History::EventTarget.workflow
+ # Define a signal handler to receive signals onto the workflow. When
+ # +name+ is defined, this creates a named signal handler which will be
+ # invoked whenever a signal named +name+ is received. A handler without
+ # a set name (defaults to nil) will be the default handler and will receive
+ # all signals that do not match a named signal handler.
+ #
+ # @param signal_name [String, Symbol, nil] an optional signal name; converted to a String
+ def on_signal(signal_name = nil, &block)
+ first_task_signals = if state_manager.sdk_flags.include?(SDKFlags::SAVE_FIRST_TASK_SIGNALS)
+ state_manager.first_task_signals
+ else
+ []
+ end
- dispatcher.register_handler(target, 'signaled') do |signal, input|
- call_in_fiber(block, signal, input)
+ if signal_name
+ target = Signal.new(signal_name)
+ dispatcher.register_handler(target, 'signaled') do |_, input|
+ # do not pass signal name when triggering a named handler
+ call_in_fiber(block, input)
+ end
+
+ first_task_signals.each do |name, input|
+ if name == signal_name
+ call_in_fiber(block, input)
+ end
+ end
+ else
+ dispatcher.register_handler(Dispatcher::WILDCARD, 'signaled') do |signal, input|
+ call_in_fiber(block, signal, input)
+ end
+
+ first_task_signals.each do |name, input|
+ call_in_fiber(block, name, input)
+ end
end
+
+ return
+ end
+
+ def on_query(query, &block)
+ query_registry.register(query, &block)
end
def cancel_activity(activity_id)
@@ -256,9 +403,87 @@ def cancel(target, cancelation_id)
end
end
+ # Send a signal from inside a workflow to another workflow. Not to be confused with
+ # Client#signal_workflow which sends a signal from outside a workflow to a workflow.
+ #
+ # @param workflow [Temporal::Workflow, nil] workflow class or nil
+ # @param signal [String] name of the signal to send
+ # @param workflow_id [String]
+ # @param run_id [String]
+ # @param input [String, Array, nil] optional arguments for the signal
+ # @param namespace [String, nil] if nil, choose the one declared on the workflow class or the
+ # global default
+ # @param child_workflow_only [Boolean] indicates whether the signal should only be delivered to a
+ # child workflow; defaults to false
+ #
+ # @return [Future] future
+ def signal_external_workflow(workflow, signal, workflow_id, run_id = nil, input = nil, namespace: nil, child_workflow_only: false)
+ execution_options = ExecutionOptions.new(workflow, {}, config.default_execution_options)
+
+ command = Command::SignalExternalWorkflow.new(
+ namespace: namespace || execution_options.namespace,
+ execution: {
+ workflow_id: workflow_id,
+ run_id: run_id
+ },
+ signal_name: signal,
+ input: input,
+ child_workflow_only: child_workflow_only
+ )
+
+ target, cancelation_id = schedule_command(command)
+ future = Future.new(target, self, cancelation_id: cancelation_id)
+
+ dispatcher.register_handler(target, 'completed') do |result|
+ future.set(result)
+ future.success_callbacks.each { |callback| call_in_fiber(callback, result) }
+ end
+
+ dispatcher.register_handler(target, 'failed') do |exception|
+ future.fail(exception)
+ future.failure_callbacks.each { |callback| call_in_fiber(callback, exception) }
+ end
+
+ future
+ end
+
+ # Replaces or adds the values of your custom search attributes specified during a workflow's execution.
+ # To use this your server must enable advanced visibility using SQL starting with version 1.20 or
+ # Elasticsearch on all versions. The attributes must be pre-configured.
+ # See https://docs.temporal.io/docs/concepts/what-is-a-search-attribute/
+ #
+ # Do be aware that non-deterministic upserting of search attributes can lead to "phantom"
+ # attributes that are available in code but not on Temporal server. For example, if your code
+ # upserted {"foo" => 1} then changed to upsert {"bar" => 2} without proper versioning, you
+ # will see {"foo" => 1, "bar" => 2} in search attributes in workflow code even though
+ # {"bar" => 2} was never upserted on Temporal server. When the same search attribute
+ # name is used with a different value, you will see a similar case where the new value will
+ # be present until the end of the history window, then change to the old version after that. This
+ # does at least match the "old" value that will be present on the server.
+ #
+ # @param search_attributes [Hash]
+ # If an attribute is registered as a Datetime, you can pass in a Time: e.g.
+ # workflow.now
+ # or as a string in UTC ISO-8601 format:
+ # workflow.now.utc.iso8601
+ # It would look like: "2022-03-01T17:39:06Z"
+ # @return [Hash] the search attributes after any preprocessing.
+ #
+ def upsert_search_attributes(search_attributes)
+ search_attributes = Helpers.process_search_attributes(search_attributes)
+ if search_attributes.empty?
+ raise ArgumentError, "Cannot upsert an empty hash for search_attributes, as this would do nothing."
+ end
+ command = Command::UpsertSearchAttributes.new(
+ search_attributes: search_attributes
+ )
+ schedule_command(command)
+ search_attributes
+ end
+
private
- attr_reader :state_manager, :dispatcher, :workflow_class, :config
+ attr_reader :state_manager, :dispatcher, :workflow_class, :query_registry, :stack_trace_tracker
def completed!
@completed = true
diff --git a/lib/temporal/workflow/context_helpers.rb b/lib/temporal/workflow/context_helpers.rb
new file mode 100644
index 00000000..0006606d
--- /dev/null
+++ b/lib/temporal/workflow/context_helpers.rb
@@ -0,0 +1,27 @@
+require 'time'
+module Temporal
+ class Workflow
+ class Context
+ # Shared between Context, LocalWorkflowContext, and Client so we can do the same validations in test and production.
+ module Helpers
+
+ def self.process_search_attributes(search_attributes)
+ if search_attributes.nil?
+ raise ArgumentError, 'search_attributes cannot be nil'
+ end
+ if !search_attributes.is_a?(Hash)
+ raise ArgumentError, "for search_attributes, expecting a Hash, not #{search_attributes.class}"
+ end
+ search_attributes.transform_values do |attribute|
+ if attribute.is_a?(Time)
+ # The server expects UTC times in the standard format.
+ attribute.utc.iso8601
+ else
+ attribute
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/workflow/convenience_methods.rb b/lib/temporal/workflow/convenience_methods.rb
index 93f97812..90741f34 100644
--- a/lib/temporal/workflow/convenience_methods.rb
+++ b/lib/temporal/workflow/convenience_methods.rb
@@ -29,6 +29,13 @@ def execute!(*input, **args)
context.execute_workflow!(self, *input, **args)
end
+
+ def schedule(cron_schedule, *input, **args)
+ context = Temporal::ThreadLocalContext.get
+ raise 'Called Workflow#schedule outside of a Workflow context' unless context
+
+ context.schedule_workflow(self, cron_schedule, *input, **args)
+ end
end
end
end
diff --git a/lib/temporal/workflow/dispatcher.rb b/lib/temporal/workflow/dispatcher.rb
index 55c581fb..eb72b4e3 100644
--- a/lib/temporal/workflow/dispatcher.rb
+++ b/lib/temporal/workflow/dispatcher.rb
@@ -1,14 +1,57 @@
+require 'temporal/errors'
+
module Temporal
class Workflow
+ # This provides a generic event dispatcher mechanism. There are two main entry
+ # points to this class, #register_handler and #dispatch.
+ #
+ # A handler may be associated with a specific event name so when that event occurs
+ # elsewhere in the system we may dispatch the event and execute the handler.
+ # We *always* execute the handler associated with the event_name.
+ #
class Dispatcher
+ # Raised if a duplicate ID is encountered during dispatch handling.
+ # This likely indicates a bug in temporal-ruby or that unsupported multithreaded
+ # workflow code is being used.
+ class DuplicateIDError < InternalError; end
+
+ # Tracks a registered handle so that it can be unregistered later
+ # The handlers are passed by reference here to be mutated (removed) by the
+ # unregister call below.
+ class RegistrationHandle
+ def initialize(handlers_for_target, id)
+ @handlers_for_target = handlers_for_target
+ @id = id
+ end
+
+ # Unregister the handler from the dispatcher
+ def unregister
+ handlers_for_target.delete(id)
+ end
+
+ private
+
+ attr_reader :handlers_for_target, :id
+ end
+
WILDCARD = '*'.freeze
+ module Order
+ AT_BEGINNING = 1
+ AT_END = 2
+ end
+
+ EventStruct = Struct.new(:event_name, :handler, :order)
+
def initialize
- @handlers = Hash.new { |hash, key| hash[key] = [] }
+ @event_handlers = Hash.new { |hash, key| hash[key] = {} }
+ @next_id = 0
end
- def register_handler(target, event_name, &handler)
- handlers[target] << [event_name, handler]
+ def register_handler(target, event_name, order=Order::AT_BEGINNING, &handler)
+ @next_id += 1
+ event_handlers[target][@next_id] = EventStruct.new(event_name, handler, order)
+ RegistrationHandle.new(event_handlers[target], @next_id)
end
def dispatch(target, event_name, args = nil)
@@ -19,12 +62,19 @@ def dispatch(target, event_name, args = nil)
private
- attr_reader :handlers
+ attr_reader :event_handlers
def handlers_for(target, event_name)
- handlers[target]
- .select { |(name, _)| name == event_name || name == WILDCARD }
- .map(&:last)
+ event_handlers[target]
+ .merge(event_handlers[WILDCARD]) { raise DuplicateIDError.new('Cannot resolve duplicate dispatcher handler IDs') }
+ .select { |_, event| match?(event, event_name) }
+ .sort_by{ |id, event_struct| [event_struct.order, id]}
+ .map { |_, event| event.handler }
+ end
+
+ def match?(event_struct, event_name)
+ event_struct.event_name == event_name ||
+ event_struct.event_name == WILDCARD
end
end
end
diff --git a/lib/temporal/workflow/errors.rb b/lib/temporal/workflow/errors.rb
index d7c294f5..f13f03bf 100644
--- a/lib/temporal/workflow/errors.rb
+++ b/lib/temporal/workflow/errors.rb
@@ -3,39 +3,51 @@
module Temporal
class Workflow
class Errors
- extend Concerns::Payloads
-
# Convert a failure returned from the server to an Error to raise to the client
- # failure: Temporal::Api::Failure::V1::Failure
- def self.generate_error(failure, default_exception_class = StandardError)
+ # failure: Temporalio::Api::Failure::V1::Failure
+ def self.generate_error(failure, converter, default_exception_class = StandardError)
case failure.failure_info
when :application_failure_info
- message = from_details_payloads(failure.application_failure_info.details)
- exception_class = safe_constantize(failure.application_failure_info.type)
+ error_type = failure.application_failure_info.type
+ exception_class = safe_constantize(error_type)
+ message = failure.message
+
if exception_class.nil?
Temporal.logger.error(
- "Could not find original error class. Defaulting to StandardError.",
- {original_error: failure.application_failure_info.type},
+ 'Could not find original error class. Defaulting to StandardError.',
+ { original_error: error_type }
)
- message = "#{failure.application_failure_info.type}: #{message}"
+ message = "#{error_type}: #{failure.message}"
exception_class = default_exception_class
end
-
-
begin
- exception = exception_class.new(message)
- rescue ArgumentError => deserialization_error
- # We don't currently support serializing/deserializing exceptions with more than one argument.
+ details = failure.application_failure_info.details
+ exception_or_message = converter.from_details_payloads(details)
+ # v1 serialization only supports StandardErrors with a single "message" argument.
+ # v2 serialization supports complex errors using our converters to serialize them.
+ # enable v2 serialization in activities with Temporal::Configuration#use_error_serialization_v2
+ if exception_or_message.is_a?(Exception)
+ exception = exception_or_message
+ else
+ exception = exception_class.new(message)
+ end
+ rescue StandardError => deserialization_error
message = "#{exception_class}: #{message}"
exception = default_exception_class.new(message)
Temporal.logger.error(
- "Could not instantiate original error. Defaulting to StandardError.",
+ "Could not instantiate original error. Defaulting to StandardError. Make sure the worker running " \
+ "your activities is configured with use_error_serialization_v2. If so, make sure the " \
+ "original error serialized by searching your logs for 'unserializable_error'. If not, you're using "\
+ "legacy serialization, and it's likely that "\
+ "your error's initializer takes something other than exactly one positional argument.",
{
- original_error: failure.application_failure_info.type,
+ original_error: error_type,
+ serialized_error: details.payloads.first.data,
+ instantiation_error_class: deserialization_error.class.to_s,
instantiation_error_message: deserialization_error.message,
},
- )
+ )
end
exception.tap do |exception|
backtrace = failure.stack_trace.split("\n")
@@ -45,18 +57,32 @@ def self.generate_error(failure, default_exception_class = StandardError)
TimeoutError.new("Timeout type: #{failure.timeout_failure_info.timeout_type.to_s}")
when :canceled_failure_info
# TODO: Distinguish between different entity cancellations
- StandardError.new(from_payloads(failure.canceled_failure_info.details))
+ StandardError.new(converter.from_payloads(failure.canceled_failure_info.details))
else
StandardError.new(failure.message)
end
end
+ WORKFLOW_ALREADY_EXISTS_SYM = Temporalio::Api::Enums::V1::StartChildWorkflowExecutionFailedCause.lookup(
+ Temporalio::Api::Enums::V1::StartChildWorkflowExecutionFailedCause::START_CHILD_WORKFLOW_EXECUTION_FAILED_CAUSE_WORKFLOW_ALREADY_EXISTS
+ )
+
+ def self.generate_error_for_child_workflow_start(cause, workflow_id)
+ if cause == WORKFLOW_ALREADY_EXISTS_SYM
+ Temporal::WorkflowExecutionAlreadyStartedFailure.new(
+ "The child workflow could not be started - per its workflow_id_reuse_policy, it conflicts with another workflow with the same id: #{workflow_id}",
+ )
+ else
+ # Right now, there's only one cause, but Temporal may add more in the future
+ StandardError.new("The child workflow could not be started. Reason: #{cause}")
+ end
+ end
+
private_class_method def self.safe_constantize(const)
Object.const_get(const) if Object.const_defined?(const)
rescue NameError
nil
end
-
end
end
end
diff --git a/lib/temporal/workflow/execution_info.rb b/lib/temporal/workflow/execution_info.rb
index 67cad260..77a27332 100644
--- a/lib/temporal/workflow/execution_info.rb
+++ b/lib/temporal/workflow/execution_info.rb
@@ -1,51 +1,43 @@
+require 'temporal/workflow/status'
+
module Temporal
class Workflow
- class ExecutionInfo < Struct.new(:workflow, :workflow_id, :run_id, :start_time, :close_time, :status, :history_length, keyword_init: true)
- RUNNING_STATUS = :RUNNING
- COMPLETED_STATUS = :COMPLETED
- FAILED_STATUS = :FAILED
- CANCELED_STATUS = :CANCELED
- TERMINATED_STATUS = :TERMINATED
- CONTINUED_AS_NEW_STATUS = :CONTINUED_AS_NEW
- TIMED_OUT_STATUS = :TIMED_OUT
-
- API_STATUS_MAP = {
- WORKFLOW_EXECUTION_STATUS_RUNNING: RUNNING_STATUS,
- WORKFLOW_EXECUTION_STATUS_COMPLETED: COMPLETED_STATUS,
- WORKFLOW_EXECUTION_STATUS_FAILED: FAILED_STATUS,
- WORKFLOW_EXECUTION_STATUS_CANCELED: CANCELED_STATUS,
- WORKFLOW_EXECUTION_STATUS_TERMINATED: TERMINATED_STATUS,
- WORKFLOW_EXECUTION_STATUS_CONTINUED_AS_NEW: CONTINUED_AS_NEW_STATUS,
- WORKFLOW_EXECUTION_STATUS_TIMED_OUT: TIMED_OUT_STATUS
- }.freeze
-
- VALID_STATUSES = [
- RUNNING_STATUS,
- COMPLETED_STATUS,
- FAILED_STATUS,
- CANCELED_STATUS,
- TERMINATED_STATUS,
- CONTINUED_AS_NEW_STATUS,
- TIMED_OUT_STATUS
- ].freeze
+ class ExecutionInfo < Struct.new(:workflow, :workflow_id, :run_id, :start_time, :close_time, :status,
+ :history_length, :memo, :search_attributes, keyword_init: true)
+ STATUSES = [
+ Temporal::Workflow::Status::RUNNING,
+ Temporal::Workflow::Status::COMPLETED,
+ Temporal::Workflow::Status::FAILED,
+ Temporal::Workflow::Status::CANCELED,
+ Temporal::Workflow::Status::TERMINATED,
+ Temporal::Workflow::Status::CONTINUED_AS_NEW,
+ Temporal::Workflow::Status::TIMED_OUT
+ ]
- def self.generate_from(response)
+ def self.generate_from(response, converter)
+ search_attributes = response.search_attributes.nil? ? {} : converter.from_payload_map_without_codec(response.search_attributes.indexed_fields)
new(
workflow: response.type.name,
workflow_id: response.execution.workflow_id,
run_id: response.execution.run_id,
start_time: response.start_time&.to_time,
close_time: response.close_time&.to_time,
- status: API_STATUS_MAP.fetch(response.status),
+ status: Temporal::Workflow::Status::API_STATUS_MAP.fetch(response.status),
history_length: response.history_length,
+ memo: converter.from_payload_map(response.memo.fields),
+ search_attributes: search_attributes
).freeze
end
- VALID_STATUSES.each do |status|
+ STATUSES.each do |status|
define_method("#{status.downcase}?") do
self.status == status
end
end
+
+ def closed?
+ !running?
+ end
end
end
end
diff --git a/lib/temporal/workflow/executions.rb b/lib/temporal/workflow/executions.rb
new file mode 100644
index 00000000..15fb9109
--- /dev/null
+++ b/lib/temporal/workflow/executions.rb
@@ -0,0 +1,67 @@
+require 'temporal/workflow/execution_info'
+
+module Temporal
+ class Workflow
+ class Executions
+ include Enumerable
+
+ DEFAULT_REQUEST_OPTIONS = {
+ next_page_token: nil
+ }.freeze
+
+ def initialize(converter, connection:, status:, request_options:)
+ @converter = converter
+ @connection = connection
+ @status = status
+ @request_options = DEFAULT_REQUEST_OPTIONS.merge(request_options)
+ end
+
+ def next_page_token
+ @request_options[:next_page_token]
+ end
+
+ def next_page
+ self.class.new(@converter, connection: @connection, status: @status, request_options: @request_options.merge(next_page_token: next_page_token))
+ end
+
+ def each
+ api_method =
+ if @status == :open
+ :list_open_workflow_executions
+ elsif @status == :closed
+ :list_closed_workflow_executions
+ else
+ :list_workflow_executions
+ end
+
+ executions = []
+
+ loop do
+ response = @connection.public_send(
+ api_method,
+ **@request_options.merge(next_page_token: @request_options[:next_page_token])
+ )
+
+ paginated_executions = response.executions.map do |raw_execution|
+ execution = Temporal::Workflow::ExecutionInfo.generate_from(raw_execution, @converter)
+ if block_given?
+ yield execution
+ end
+
+ execution
+ end
+
+ @request_options[:next_page_token] = response.next_page_token
+
+ return paginated_executions unless @request_options[:max_page_size].nil? # return after the first page if a max page size was set
+
+ executions += paginated_executions
+
+ break if @request_options[:next_page_token].to_s.empty?
+ end
+
+ executions
+ end
+ end
+ end
+end
diff --git a/lib/temporal/workflow/executor.rb b/lib/temporal/workflow/executor.rb
index c81703cb..f40fef3b 100644
--- a/lib/temporal/workflow/executor.rb
+++ b/lib/temporal/workflow/executor.rb
@@ -1,24 +1,39 @@
require 'fiber'
+require 'temporal/workflow/context'
require 'temporal/workflow/dispatcher'
+require 'temporal/workflow/query_registry'
+require 'temporal/workflow/stack_trace_tracker'
require 'temporal/workflow/state_manager'
-require 'temporal/workflow/context'
require 'temporal/workflow/history/event_target'
+require 'temporal/metadata'
module Temporal
class Workflow
class Executor
- def initialize(workflow_class, history, config)
+ RunResult = Struct.new(:commands, :new_sdk_flags_used, keyword_init: true)
+
+ # @param workflow_class [Class]
+ # @param history [Workflow::History]
+ # @param task_metadata [Metadata::WorkflowTask]
+ # @param config [Configuration]
+ # @param track_stack_trace [Boolean]
+ # @return [RunResult]
+ def initialize(workflow_class, history, task_metadata, config, track_stack_trace, middleware_chain)
@workflow_class = workflow_class
@dispatcher = Dispatcher.new
- @state_manager = StateManager.new(dispatcher)
+ @query_registry = QueryRegistry.new
+ @state_manager = StateManager.new(dispatcher, config)
@history = history
+ @task_metadata = task_metadata
@config = config
+ @track_stack_trace = track_stack_trace
+ @middleware_chain = middleware_chain
end
def run
dispatcher.register_handler(
- History::EventTarget.workflow,
+ History::EventTarget.start_workflow,
'started',
&method(:execute_workflow)
)
@@ -27,18 +42,43 @@ def run
state_manager.apply(window)
end
- return state_manager.commands
+ RunResult.new(commands: state_manager.final_commands, new_sdk_flags_used: state_manager.new_sdk_flags_used)
+ end
+
+ # Process queries using the pre-registered query handlers
+ #
+ # @note this method is expected to be executed after the history has
+ # been fully replayed (by invoking the #run method)
+ #
+ # @param queries [Hash]
+ #
+ # @return [Hash]
+ def process_queries(queries)
+ queries.transform_values(&method(:process_query))
end
private
- attr_reader :workflow_class, :dispatcher, :state_manager, :history, :config
+ attr_reader :workflow_class, :dispatcher, :query_registry, :state_manager,
+ :task_metadata, :history, :config, :track_stack_trace, :middleware_chain
+
+ def process_query(query)
+ result = query_registry.handle(query.query_type, query.query_args)
+
+ QueryResult.answer(result)
+ rescue StandardError => e
+ QueryResult.failure(e)
+ end
- def execute_workflow(input, metadata)
- context = Workflow::Context.new(state_manager, dispatcher, workflow_class, metadata, config)
+ def execute_workflow(input, workflow_started_event)
+ metadata = Metadata.generate_workflow_metadata(workflow_started_event, task_metadata, config.converter)
+ context = Workflow::Context.new(state_manager, dispatcher, workflow_class, metadata, config, query_registry,
+ track_stack_trace)
Fiber.new do
- workflow_class.execute_in_context(context, input)
+ middleware_chain.invoke(metadata) do
+ workflow_class.execute_in_context(context, input)
+ end
end.resume
end
end
diff --git a/lib/temporal/workflow/future.rb b/lib/temporal/workflow/future.rb
index 550a038b..26929e86 100644
--- a/lib/temporal/workflow/future.rb
+++ b/lib/temporal/workflow/future.rb
@@ -31,7 +31,7 @@ def failed?
def wait
return if finished?
- context.wait_for(self)
+ context.wait_for_any(self)
end
def get
diff --git a/lib/temporal/workflow/history.rb b/lib/temporal/workflow/history.rb
index ad188a77..07bbe96d 100644
--- a/lib/temporal/workflow/history.rb
+++ b/lib/temporal/workflow/history.rb
@@ -51,11 +51,15 @@ def next_window
CANCEL_TIMER_FAILED
TIMER_CANCELED
WORKFLOW_EXECUTION_CANCEL_REQUESTED
+ WORKFLOW_EXECUTION_COMPLETED
+ WORKFLOW_EXECUTION_CONTINUED_AS_NEW
+ WORKFLOW_EXECUTION_FAILED
START_CHILD_WORKFLOW_EXECUTION_INITIATED
SIGNAL_EXTERNAL_WORKFLOW_EXECUTION_INITIATED
REQUEST_CANCEL_ACTIVITY_TASK_FAILED
REQUEST_CANCEL_EXTERNAL_WORKFLOW_EXECUTION_INITIATED
MARKER_RECORDED
+ UPSERT_WORKFLOW_SEARCH_ATTRIBUTES
].freeze
attr_reader :iterator
diff --git a/lib/temporal/workflow/history/event.rb b/lib/temporal/workflow/history/event.rb
index be389636..562fd018 100644
--- a/lib/temporal/workflow/history/event.rb
+++ b/lib/temporal/workflow/history/event.rb
@@ -10,10 +10,7 @@ class Event
ACTIVITY_TASK_CANCELED
TIMER_FIRED
REQUEST_CANCEL_EXTERNAL_WORKFLOW_EXECUTION_FAILED
- SIGNAL_EXTERNAL_WORKFLOW_EXECUTION_FAILED
EXTERNAL_WORKFLOW_EXECUTION_CANCEL_REQUESTED
- EXTERNAL_WORKFLOW_EXECUTION_SIGNALED
- UPSERT_WORKFLOW_SEARCH_ATTRIBUTES
].freeze
CHILD_WORKFLOW_EVENTS = %w[
@@ -48,7 +45,7 @@ def originating_event_id
1 # fixed id for everything related to current workflow
when *EVENT_TYPES
attributes.scheduled_event_id
- when *CHILD_WORKFLOW_EVENTS
+ when *CHILD_WORKFLOW_EVENTS, 'EXTERNAL_WORKFLOW_EXECUTION_SIGNALED', 'SIGNAL_EXTERNAL_WORKFLOW_EXECUTION_FAILED'
attributes.initiated_event_id
else
id
diff --git a/lib/temporal/workflow/history/event_target.rb b/lib/temporal/workflow/history/event_target.rb
index a54dab55..881a7823 100644
--- a/lib/temporal/workflow/history/event_target.rb
+++ b/lib/temporal/workflow/history/event_target.rb
@@ -14,12 +14,18 @@ class UnexpectedEventType < InternalError; end
MARKER_TYPE = :marker
EXTERNAL_WORKFLOW_TYPE = :external_workflow
CANCEL_EXTERNAL_WORKFLOW_REQUEST_TYPE = :cancel_external_workflow_request
- WORKFLOW_TYPE = :workflow
CANCEL_WORKFLOW_REQUEST_TYPE = :cancel_workflow_request
+ WORKFLOW_TYPE = :workflow
+ COMPLETE_WORKFLOW_TYPE = :complete_workflow
+ CONTINUE_AS_NEW_WORKFLOW_TYPE = :continue_as_new_workflow
+ FAIL_WORKFLOW_TYPE = :fail_workflow
+ SIGNAL_WORKFLOW_TYPE = :signal_workflow
+ START_WORKFLOW_TYPE = :start_workflow
+ UPSERT_SEARCH_ATTRIBUTES_REQUEST_TYPE = :upsert_search_attributes_request
# NOTE: The order is important, first prefix match wins (will be a longer match)
TARGET_TYPES = {
- 'ACTIVITY_TASK_CANCEL' => CANCEL_ACTIVITY_REQUEST_TYPE,
+ 'ACTIVITY_TASK_CANCEL_REQUESTED' => CANCEL_ACTIVITY_REQUEST_TYPE,
'ACTIVITY_TASK' => ACTIVITY_TYPE,
'REQUEST_CANCEL_ACTIVITY_TASK' => CANCEL_ACTIVITY_REQUEST_TYPE,
'TIMER_CANCELED' => CANCEL_TIMER_REQUEST_TYPE,
@@ -32,15 +38,23 @@ class UnexpectedEventType < InternalError; end
'SIGNAL_EXTERNAL_WORKFLOW_EXECUTION' => EXTERNAL_WORKFLOW_TYPE,
'EXTERNAL_WORKFLOW_EXECUTION_CANCEL' => CANCEL_EXTERNAL_WORKFLOW_REQUEST_TYPE,
'REQUEST_CANCEL_EXTERNAL_WORKFLOW_EXECUTION' => CANCEL_EXTERNAL_WORKFLOW_REQUEST_TYPE,
- 'UPSERT_WORKFLOW_SEARCH_ATTRIBUTES' => WORKFLOW_TYPE,
+ 'UPSERT_WORKFLOW_SEARCH_ATTRIBUTES' => UPSERT_SEARCH_ATTRIBUTES_REQUEST_TYPE,
'WORKFLOW_EXECUTION_CANCEL' => CANCEL_WORKFLOW_REQUEST_TYPE,
+ 'WORKFLOW_EXECUTION_COMPLETED' => COMPLETE_WORKFLOW_TYPE,
+ 'WORKFLOW_EXECUTION_CONTINUED_AS_NEW' => CONTINUE_AS_NEW_WORKFLOW_TYPE,
+ 'WORKFLOW_EXECUTION_FAILED' => FAIL_WORKFLOW_TYPE,
+ 'WORKFLOW_EXECUTION_SIGNALED' => SIGNAL_WORKFLOW_TYPE,
+ 'WORKFLOW_EXECUTION_STARTED' => START_WORKFLOW_TYPE,
+ # This is a fall-through type for various event types that workflow code cannot
+ # react to, either because they're externally triggered (workflow termination,
+ # timeout) or use an unsupported feature (workflow cancellation, updates).
'WORKFLOW_EXECUTION' => WORKFLOW_TYPE,
}.freeze
attr_reader :id, :type
- def self.workflow
- @workflow ||= new(1, WORKFLOW_TYPE)
+ def self.start_workflow
+ @workflow ||= new(1, START_WORKFLOW_TYPE)
end
def self.from_event(event)
@@ -61,7 +75,7 @@ def initialize(id, type)
end
def ==(other)
- id == other.id && type == other.type
+ self.class == other.class && id == other.id && type == other.type
end
def eql?(other)
diff --git a/lib/temporal/workflow/history/serialization.rb b/lib/temporal/workflow/history/serialization.rb
new file mode 100644
index 00000000..2219dddd
--- /dev/null
+++ b/lib/temporal/workflow/history/serialization.rb
@@ -0,0 +1,61 @@
+module Temporal
+ class Workflow
+ class History
+ # Functions for deserializing workflow histories from JSON and protobuf. These are useful
+ # in writing replay tests
+ #
+ # `from_` methods return Temporal::Workflow::History instances.
+ # `to_` methods take Temporalio::Api::History::V1::History instances
+ #
+ # This asymmetry stems from our own internal history representation being a projection
+ # of the "full" history.
+ class Serialization
+ # Parse History from a JSON string
+ def self.from_json(json)
+ raw_history = Temporalio::Api::History::V1::History.decode_json(json, ignore_unknown_fields: true)
+ Workflow::History.new(raw_history.events)
+ end
+
+ # Convert a raw history to JSON. This method is typically only used by methods on Workflow::Client
+ def self.to_json(raw_history, pretty_print: true)
+ json = raw_history.to_json
+ if pretty_print
+ # pretty print JSON to make it more debuggable
+ ::JSON.pretty_generate(::JSON.load(json))
+ else
+ json
+ end
+ end
+
+ def self.from_json_file(path)
+ self.from_json(File.read(path))
+ end
+
+ def self.to_json_file(raw_history, path, pretty_print: true)
+ json = self.to_json(raw_history, pretty_print: pretty_print)
+ File.write(path, json)
+ end
+
+ def self.from_protobuf(protobuf)
+ raw_history = Temporalio::Api::History::V1::History.decode(protobuf)
+ Workflow::History.new(raw_history.events)
+ end
+
+ def self.to_protobuf(raw_history)
+ raw_history.to_proto
+ end
+
+ def self.from_protobuf_file(path)
+ self.from_protobuf(File.open(path, "rb", &:read))
+ end
+
+ def self.to_protobuf_file(raw_history, path)
+ protobuf = self.to_protobuf(raw_history)
+ File.open(path, "wb") do |f|
+ f.write(protobuf)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/temporal/workflow/history/size.rb b/lib/temporal/workflow/history/size.rb
new file mode 100644
index 00000000..502cb0b4
--- /dev/null
+++ b/lib/temporal/workflow/history/size.rb
@@ -0,0 +1,11 @@
+module Temporal
+ class Workflow
+ class History
+ Size = Struct.new(
+ :bytes, # integer, total number of bytes used
+ :events, # integer, total number of history events used
+ :suggest_continue_as_new, # boolean, true if server history length limits are being approached
+ keyword_init: true)
+ end
+ end
+end
diff --git a/lib/temporal/workflow/history/window.rb b/lib/temporal/workflow/history/window.rb
index 944c8d25..657b129b 100644
--- a/lib/temporal/workflow/history/window.rb
+++ b/lib/temporal/workflow/history/window.rb
@@ -1,15 +1,20 @@
+require 'set'
+require 'temporal/workflow/sdk_flags'
+
module Temporal
class Workflow
class History
class Window
- attr_reader :local_time, :last_event_id, :events, :markers
+ attr_reader :local_time, :last_event_id, :events, :sdk_flags, :history_size_bytes, :suggest_continue_as_new
def initialize
@local_time = nil
@last_event_id = nil
@events = []
- @markers = []
@replay = false
+ @sdk_flags = Set.new
+ @history_size_bytes = 0
+ @suggest_continue_as_new = false
end
def replay?
@@ -18,16 +23,21 @@ def replay?
def add(event)
case event.type
- when 'MARKER_RECORDED'
- markers << event
when 'WORKFLOW_TASK_STARTED'
@last_event_id = event.id + 1 # one for completed
@local_time = event.timestamp
+ @history_size_bytes = event.attributes.history_size_bytes
+ @suggest_continue_as_new = event.attributes.suggest_continue_as_new
when 'WORKFLOW_TASK_FAILED', 'WORKFLOW_TASK_TIMED_OUT'
@last_event_id = nil
@local_time = nil
when 'WORKFLOW_TASK_COMPLETED'
@replay = true
+ used_flags = Set.new(event.attributes&.sdk_metadata&.lang_used_flags)
+ unknown_flags = used_flags.difference(SDKFlags::ALL)
+ raise Temporal::UnknownSDKFlagError, "Unknown SDK flags: #{unknown_flags.join(',')}" if unknown_flags.any?
+
+ used_flags.each { |flag| sdk_flags.add(flag) }
when 'WORKFLOW_TASK_SCHEDULED'
# no-op
else
diff --git a/lib/temporal/workflow/poller.rb b/lib/temporal/workflow/poller.rb
index f312d0f9..198f4502 100644
--- a/lib/temporal/workflow/poller.rb
+++ b/lib/temporal/workflow/poller.rb
@@ -1,22 +1,27 @@
+require 'grpc/errors'
require 'temporal/connection'
require 'temporal/thread_pool'
require 'temporal/middleware/chain'
require 'temporal/workflow/task_processor'
require 'temporal/error_handler'
+require 'temporal/metric_keys'
module Temporal
class Workflow
class Poller
DEFAULT_OPTIONS = {
- thread_pool_size: 10
+ thread_pool_size: 10,
+ binary_checksum: nil,
+ poll_retry_seconds: 0
}.freeze
- def initialize(namespace, task_queue, workflow_lookup, config, middleware = [], options = {})
+ def initialize(namespace, task_queue, workflow_lookup, config, middleware = [], workflow_middleware = [], options = {})
@namespace = namespace
@task_queue = task_queue
@workflow_lookup = workflow_lookup
@config = config
@middleware = middleware
+ @workflow_middleware = workflow_middleware
@shutting_down = false
@options = DEFAULT_OPTIONS.merge(options)
end
@@ -28,7 +33,7 @@ def start
def stop_polling
@shutting_down = true
- Temporal.logger.info('Shutting down a workflow poller')
+ Temporal.logger.info('Shutting down a workflow poller', { namespace: namespace, task_queue: task_queue })
end
def cancel_pending_requests
@@ -36,13 +41,18 @@ def cancel_pending_requests
end
def wait
+ unless shutting_down?
+ raise 'Workflow poller waiting for shutdown completion without being in shutting_down state!'
+ end
+
thread.join
thread_pool.shutdown
end
private
- attr_reader :namespace, :task_queue, :connection, :workflow_lookup, :config, :middleware, :options, :thread
+ attr_reader :namespace, :task_queue, :connection, :workflow_lookup, :config, :middleware, :workflow_middleware,
+ :options, :thread
def connection
@connection ||= Temporal::Connection.generate(config.for_connection)
@@ -53,6 +63,9 @@ def shutting_down?
end
def poll_loop
+ # Prevent the poller thread from silently dying
+ Thread.current.abort_on_exception = true
+
last_poll_time = Time.now
metrics_tags = { namespace: namespace, task_queue: task_queue }.freeze
@@ -62,11 +75,18 @@ def poll_loop
return if shutting_down?
time_diff_ms = ((Time.now - last_poll_time) * 1000).round
- Temporal.metrics.timing('workflow_poller.time_since_last_poll', time_diff_ms, metrics_tags)
- Temporal.logger.debug("Polling Worklow task queue", { namespace: namespace, task_queue: task_queue })
+ Temporal.metrics.timing(Temporal::MetricKeys::WORKFLOW_POLLER_TIME_SINCE_LAST_POLL, time_diff_ms,
+ metrics_tags)
+ Temporal.logger.debug('Polling workflow task queue', { namespace: namespace, task_queue: task_queue })
task = poll_for_task
last_poll_time = Time.now
+
+ Temporal.metrics.increment(
+ Temporal::MetricKeys::WORKFLOW_POLLER_POLL_COMPLETED,
+ metrics_tags.merge(received_task: (!task.nil?).to_s)
+ )
+
next unless task&.workflow_type
thread_pool.schedule { process(task) }
@@ -74,22 +94,47 @@ def poll_loop
end
def poll_for_task
- connection.poll_workflow_task_queue(namespace: namespace, task_queue: task_queue)
- rescue StandardError => error
- Temporal.logger.error("Unable to poll Workflow task queue", { namespace: namespace, task_queue: task_queue, error: error.inspect })
- Temporal::ErrorHandler.handle(error)
+ connection.poll_workflow_task_queue(namespace: namespace, task_queue: task_queue,
+ binary_checksum: binary_checksum)
+ rescue ::GRPC::Cancelled
+ # We're shutting down and we've already reported that in the logs
+ nil
+ rescue StandardError => e
+ Temporal.logger.error('Unable to poll Workflow task queue',
+ { namespace: namespace, task_queue: task_queue, error: e.inspect })
+ Temporal::ErrorHandler.handle(e, config)
+
+ sleep(poll_retry_seconds)
nil
end
def process(task)
middleware_chain = Middleware::Chain.new(middleware)
+ workflow_middleware_chain = Middleware::Chain.new(workflow_middleware)
- TaskProcessor.new(task, namespace, workflow_lookup, middleware_chain, config).process
+ TaskProcessor.new(task, task_queue, namespace, workflow_lookup, middleware_chain, workflow_middleware_chain,
+ config, binary_checksum).process
end
def thread_pool
- @thread_pool ||= ThreadPool.new(options[:thread_pool_size])
+ @thread_pool ||= ThreadPool.new(
+ options[:thread_pool_size],
+ @config,
+ {
+ pool_name: 'workflow_task_poller',
+ namespace: namespace,
+ task_queue: task_queue
+ }
+ )
+ end
+
+ def binary_checksum
+ @options[:binary_checksum]
+ end
+
+ def poll_retry_seconds
+ @options[:poll_retry_seconds]
end
end
end
diff --git a/lib/temporal/workflow/query_registry.rb b/lib/temporal/workflow/query_registry.rb
new file mode 100644
index 00000000..162b7a7f
--- /dev/null
+++ b/lib/temporal/workflow/query_registry.rb
@@ -0,0 +1,36 @@
+require 'temporal/errors'
+
+module Temporal
+ class Workflow
+ class QueryRegistry
+ def initialize
+ @handlers = {}
+ end
+
+ def register(type, &handler)
+ if handlers.key?(type)
+ warn "[NOTICE] Overwriting a query handler for #{type}"
+ end
+
+ handlers[type] = handler
+ end
+
+ def handle(type, args = nil)
+ handler = handlers[type]
+
+ unless handler
+ # The end of the formatted error message (e.g., "KnownQueryTypes=[query-1, query-2, query-3]")
+ # is used by temporal-web to show a list of queries that can be run on the 'Query' tab of a
+ # workflow. If that part of the error message is changed, that functionality will break.
+ raise Temporal::QueryFailed, "Workflow did not register a handler for '#{type}'. KnownQueryTypes=[#{handlers.keys.join(", ")}]"
+ end
+
+ handler.call(*args)
+ end
+
+ private
+
+ attr_reader :handlers
+ end
+ end
+end
diff --git a/lib/temporal/workflow/query_result.rb b/lib/temporal/workflow/query_result.rb
new file mode 100644
index 00000000..a4d0401e
--- /dev/null
+++ b/lib/temporal/workflow/query_result.rb
@@ -0,0 +1,16 @@
+module Temporal
+ class Workflow
+ module QueryResult
+ Answer = Struct.new(:result)
+ Failure = Struct.new(:error)
+
+ def self.answer(result)
+ Answer.new(result).freeze
+ end
+
+ def self.failure(error)
+ Failure.new(error).freeze
+ end
+ end
+ end
+end
diff --git a/lib/temporal/workflow/replay_aware_logger.rb b/lib/temporal/workflow/replay_aware_logger.rb
index a56494b4..65dafc59 100644
--- a/lib/temporal/workflow/replay_aware_logger.rb
+++ b/lib/temporal/workflow/replay_aware_logger.rb
@@ -3,11 +3,9 @@ class Workflow
class ReplayAwareLogger
SEVERITIES = %i[debug info warn error fatal unknown].freeze
- attr_writer :replay
-
- def initialize(main_logger, replay = true)
+ def initialize(main_logger, replaying:)
@main_logger = main_logger
- @replay = replay
+ @replaying = replaying
end
SEVERITIES.each do |severity|
@@ -29,7 +27,7 @@ def log(severity, message, data = {})
attr_reader :main_logger
def replay?
- @replay
+ @replaying.call
end
end
end
diff --git a/lib/temporal/workflow/sdk_flags.rb b/lib/temporal/workflow/sdk_flags.rb
new file mode 100644
index 00000000..7849bf08
--- /dev/null
+++ b/lib/temporal/workflow/sdk_flags.rb
@@ -0,0 +1,14 @@
+require 'set'
+
+module Temporal
+ class Workflow
+ module SDKFlags
+ HANDLE_SIGNALS_FIRST = 1
+ # The presence of SAVE_FIRST_TASK_SIGNALS implies HANDLE_SIGNALS_FIRST
+ SAVE_FIRST_TASK_SIGNALS = 2
+
+ # Make sure to include all known flags here
+ ALL = Set.new([HANDLE_SIGNALS_FIRST, SAVE_FIRST_TASK_SIGNALS])
+ end
+ end
+end
diff --git a/lib/temporal/workflow/signal.rb b/lib/temporal/workflow/signal.rb
new file mode 100644
index 00000000..e31d77fc
--- /dev/null
+++ b/lib/temporal/workflow/signal.rb
@@ -0,0 +1,5 @@
+module Temporal
+ class Workflow
+ Signal = Struct.new(:signal_name)
+ end
+end
diff --git a/lib/temporal/workflow/stack_trace_tracker.rb b/lib/temporal/workflow/stack_trace_tracker.rb
new file mode 100644
index 00000000..dfa35c94
--- /dev/null
+++ b/lib/temporal/workflow/stack_trace_tracker.rb
@@ -0,0 +1,39 @@
+require 'fiber'
+
+module Temporal
+ class Workflow
+ # Temporal-web issues a query that returns the stack trace for all workflow fibers
+ # that are currently scheduled. This is helpful for understanding what exactly a
+ # workflow is waiting on.
+ class StackTraceTracker
+ STACK_TRACE_QUERY_NAME = '__stack_trace'
+
+ def initialize
+ @stack_traces = {}
+ end
+
+ # Record the stack trace for the current fiber
+ def record
+ stack_traces[Fiber.current] = Kernel.caller
+ end
+
+ # Clear the stack traces for the current fiber
+ def clear
+ stack_traces.delete(Fiber.current)
+ end
+
+ # Format all recorded backtraces in a human readable format
+ def to_s
+ formatted_stack_traces = ["Fiber count: #{stack_traces.count}"] + stack_traces.map do |_, stack_trace|
+ stack_trace.join("\n")
+ end
+
+ formatted_stack_traces.join("\n\n") + "\n"
+ end
+
+ private
+
+ attr_reader :stack_traces
+ end
+ end
+end
diff --git a/lib/temporal/workflow/state_manager.rb b/lib/temporal/workflow/state_manager.rb
index 693f1305..c90ed3de 100644
--- a/lib/temporal/workflow/state_manager.rb
+++ b/lib/temporal/workflow/state_manager.rb
@@ -3,24 +3,23 @@
require 'temporal/workflow/command'
require 'temporal/workflow/command_state_machine'
require 'temporal/workflow/history/event_target'
-require 'temporal/metadata'
-require 'temporal/concerns/payloads'
+require 'temporal/workflow/history/size'
require 'temporal/workflow/errors'
+require 'temporal/workflow/sdk_flags'
+require 'temporal/workflow/signal'
module Temporal
class Workflow
class StateManager
- include Concerns::Payloads
-
SIDE_EFFECT_MARKER = 'SIDE_EFFECT'.freeze
RELEASE_MARKER = 'RELEASE'.freeze
class UnsupportedEvent < Temporal::InternalError; end
class UnsupportedMarkerType < Temporal::InternalError; end
- attr_reader :commands, :local_time
+ attr_reader :local_time, :search_attributes, :new_sdk_flags_used, :sdk_flags, :first_task_signals
- def initialize(dispatcher)
+ def initialize(dispatcher, config)
@dispatcher = dispatcher
@commands = []
@marker_ids = Set.new
@@ -30,6 +29,26 @@ def initialize(dispatcher)
@last_event_id = 0
@local_time = nil
@replay = false
+ @search_attributes = {}
+ @config = config
+ @converter = config.converter
+
+ # Current flags in use, built up from workflow task completed history entries
+ @sdk_flags = Set.new
+
+ # New flags used when not replaying
+ @new_sdk_flags_used = Set.new
+
+ # Because signals must be processed first and a signal handler cannot be registered
+ # until workflow code runs, this dispatcher handler will save these signals for
+ # when a callback is first registered.
+ @first_task_signals = []
+ @first_task_signal_handler = dispatcher.register_handler(
+ Dispatcher::WILDCARD,
+ 'signaled'
+ ) do |name, input|
+ @first_task_signals << [name, input]
+ end
end
def replay?
@@ -40,9 +59,7 @@ def schedule(command)
# Fast-forward event IDs to skip all the markers (version markers can
# be removed, so we can't rely on them being scheduled during a replay)
command_id = next_event_id
- while marker_ids.include?(command_id) do
- command_id = next_event_id
- end
+ command_id = next_event_id while marker_ids.include?(command_id)
cancelation_id =
case command
@@ -52,14 +69,38 @@ def schedule(command)
command.workflow_id ||= command_id
when Command::StartTimer
command.timer_id ||= command_id
+ when Command::UpsertSearchAttributes
+ # This allows newly upserted search attributes to be read
+ # immediately. Without this, attributes would not be available
+ # until the next history window is applied on replay.
+ search_attributes.merge!(command.search_attributes)
end
state_machine = command_tracker[command_id]
state_machine.requested if state_machine.state == CommandStateMachine::NEW_STATE
+ validate_append_command(command)
commands << [command_id, command]
- return [event_target_from(command_id, command), cancelation_id]
+ [event_target_from(command_id, command), cancelation_id]
+ end
+
+ def final_commands
+ # Filter out any activity or timer cancellation commands if the underlying activity or
+ # timer has completed. This can occur when an activity or timer completes while a
+ # workflow task is being processed that would otherwise cancel this timer or activity.
+ commands.filter do |command_pair|
+ case command_pair.last
+ when Command::CancelTimer
+ state_machine = command_tracker[command_pair.last.timer_id]
+ !state_machine.closed?
+ when Command::RequestActivityCancellation
+ state_machine = command_tracker[command_pair.last.activity_id]
+ !state_machine.closed?
+ else
+ true
+ end
+ end
end
def release?(release_name)
@@ -76,44 +117,155 @@ def apply(history_window)
@replay = history_window.replay?
@local_time = history_window.local_time
@last_event_id = history_window.last_event_id
+ history_window.sdk_flags.each { |flag| sdk_flags.add(flag) }
+ @history_size_bytes = history_window.history_size_bytes
+ @suggest_continue_as_new = history_window.suggest_continue_as_new
- # handle markers first since their data is needed for processing events
- history_window.markers.each do |event|
+ order_events(history_window.events).each do |event|
apply_event(event)
end
- history_window.events.each do |event|
- apply_event(event)
+ return unless @first_task_signal_handler
+
+ @first_task_signal_handler.unregister
+ @first_task_signals = []
+ @first_task_signal_handler = nil
+ end
+
+ def self.event_order(event, signals_first, execution_started_before_signals)
+ if event.type == 'MARKER_RECORDED'
+ # markers always come first
+ 0
+ elsif !execution_started_before_signals && workflow_execution_started_event?(event)
+ 1
+ elsif signals_first && signal_event?(event)
+ # signals come next if we are in signals first mode
+ 2
+ else
+ # then everything else
+ 3
end
end
+ def self.signal_event?(event)
+ event.type == 'WORKFLOW_EXECUTION_SIGNALED'
+ end
+
+ def self.workflow_execution_started_event?(event)
+ event.type == 'WORKFLOW_EXECUTION_STARTED'
+ end
+
+ def history_size
+ History::Size.new(
+ events: @last_event_id,
+ bytes: @history_size_bytes,
+ suggest_continue_as_new: @suggest_continue_as_new
+ ).freeze
+ end
+
private
- attr_reader :dispatcher, :command_tracker, :marker_ids, :side_effects, :releases
+ attr_reader :commands, :dispatcher, :command_tracker, :marker_ids, :side_effects, :releases, :config, :converter
+
+ def use_signals_first(raw_events)
+ # The presence of SAVE_FIRST_TASK_SIGNALS implies HANDLE_SIGNALS_FIRST
+ if sdk_flags.include?(SDKFlags::HANDLE_SIGNALS_FIRST) || sdk_flags.include?(SDKFlags::SAVE_FIRST_TASK_SIGNALS)
+ # If signals were handled first when this task or a previous one in this run was first
+ # played, we must continue to do so in order to ensure determinism regardless of what
+ # the configuration value is set to. Even the capabilities can be ignored because the
+ # server must have returned SDK metadata for this to be true.
+ true
+ elsif raw_events.any? { |event| StateManager.signal_event?(event) } &&
+ # If this is being played for the first time, use the configuration flag to choose
+ !replay? && !config.legacy_signals &&
+ # In order to preserve determinism, the server must support SDK metadata to order signals
+ # first. This is checked last because it will result in a Temporal server call the first
+ # time it's called in the worker process.
+ config.capabilities.sdk_metadata
+
+ if raw_events.any? do |event|
+ StateManager.workflow_execution_started_event?(event)
+ end && !config.no_signals_in_first_task
+ report_flag_used(SDKFlags::SAVE_FIRST_TASK_SIGNALS)
+ else
+ report_flag_used(SDKFlags::HANDLE_SIGNALS_FIRST)
+ end
+
+ true
+ else
+ false
+ end
+ end
+
+ def order_events(raw_events)
+ signals_first = use_signals_first(raw_events)
+ execution_started_before_signals = sdk_flags.include?(SDKFlags::SAVE_FIRST_TASK_SIGNALS)
+
+ raw_events.sort_by.with_index do |event, index|
+ # sort_by is not stable, so include index to preserve order
+ [StateManager.event_order(event, signals_first, execution_started_before_signals), index]
+ end
+ end
+
+ def report_flag_used(flag)
+ # Only add the flag if it's not already present and we are not replaying
+ if !replay? &&
+ !sdk_flags.include?(flag) &&
+ !new_sdk_flags_used.include?(flag)
+ new_sdk_flags_used << flag
+ sdk_flags << flag
+ end
+ end
def next_event_id
@last_event_id += 1
end
+ def validate_append_command(command)
+ return if commands.last.nil?
+
+ _, previous_command = commands.last
+ case previous_command
+ when Command::CompleteWorkflow, Command::FailWorkflow, Command::ContinueAsNew
+ context_string = case previous_command
+ when Command::CompleteWorkflow
+ 'The workflow completed'
+ when Command::FailWorkflow
+ 'The workflow failed'
+ when Command::ContinueAsNew
+ 'The workflow continued as new'
+ end
+ raise Temporal::WorkflowAlreadyCompletingError, "You cannot do anything in a Workflow after it completes. #{context_string}, "\
+ "but then it sent a new command: #{command.class}. This can happen, for example, if you've "\
+ 'not waited for all of your Activity futures before finishing the Workflow.'
+ end
+ end
+
def apply_event(event)
state_machine = command_tracker[event.originating_event_id]
- target = History::EventTarget.from_event(event)
+ history_target = History::EventTarget.from_event(event)
case event.type
when 'WORKFLOW_EXECUTION_STARTED'
+ unless event.attributes.search_attributes.nil?
+ search_attributes.merge!(converter.from_payload_map(event.attributes.search_attributes&.indexed_fields || {}))
+ end
+
state_machine.start
dispatch(
- History::EventTarget.workflow,
+ History::EventTarget.start_workflow,
'started',
- from_payloads(event.attributes.input),
- Metadata.generate(Metadata::WORKFLOW_TYPE, event.attributes)
+ converter.from_payloads(event.attributes.input),
+ event
)
when 'WORKFLOW_EXECUTION_COMPLETED'
- # todo
+ # should only be triggered in query execution and replay testing
+ discard_command(history_target)
when 'WORKFLOW_EXECUTION_FAILED'
- # todo
+ # should only be triggered in query execution and replay testing
+ discard_command(history_target)
when 'WORKFLOW_EXECUTION_TIMED_OUT'
# todo
@@ -135,53 +287,55 @@ def apply_event(event)
when 'ACTIVITY_TASK_SCHEDULED'
state_machine.schedule
- discard_command(target)
+ discard_command(history_target)
when 'ACTIVITY_TASK_STARTED'
state_machine.start
when 'ACTIVITY_TASK_COMPLETED'
state_machine.complete
- dispatch(target, 'completed', from_result_payloads(event.attributes.result))
+ dispatch(history_target, 'completed', converter.from_result_payloads(event.attributes.result))
when 'ACTIVITY_TASK_FAILED'
state_machine.fail
- dispatch(target, 'failed', Temporal::Workflow::Errors.generate_error(event.attributes.failure, ActivityException))
+ dispatch(history_target, 'failed',
+ Temporal::Workflow::Errors.generate_error(event.attributes.failure, converter, ActivityException))
when 'ACTIVITY_TASK_TIMED_OUT'
state_machine.time_out
- dispatch(target, 'failed', Temporal::Workflow::Errors.generate_error(event.attributes.failure))
+ dispatch(history_target, 'failed', Temporal::Workflow::Errors.generate_error(event.attributes.failure, converter))
when 'ACTIVITY_TASK_CANCEL_REQUESTED'
state_machine.requested
- discard_command(target)
+ discard_command(history_target)
when 'REQUEST_CANCEL_ACTIVITY_TASK_FAILED'
state_machine.fail
- discard_command(target)
- dispatch(target, 'failed', event.attributes.cause, nil)
+ discard_command(history_target)
+ dispatch(history_target, 'failed', event.attributes.cause, nil)
when 'ACTIVITY_TASK_CANCELED'
state_machine.cancel
- dispatch(target, 'failed', Temporal::Workflow::Errors.generate_error(event.attributes.failure))
+ dispatch(history_target, 'failed',
+ Temporal::ActivityCanceled.new(converter.from_details_payloads(event.attributes.details)))
when 'TIMER_STARTED'
state_machine.start
- discard_command(target)
+ discard_command(history_target)
when 'TIMER_FIRED'
state_machine.complete
- dispatch(target, 'fired')
+ dispatch(history_target, 'fired')
when 'CANCEL_TIMER_FAILED'
state_machine.failed
- discard_command(target)
- dispatch(target, 'failed', event.attributes.cause, nil)
+ discard_command(history_target)
+ dispatch(history_target, 'failed', event.attributes.cause, nil)
when 'TIMER_CANCELED'
state_machine.cancel
- discard_command(target)
- dispatch(target, 'canceled')
+ discard_command(history_target)
+ dispatch(history_target, 'canceled')
when 'WORKFLOW_EXECUTION_CANCEL_REQUESTED'
# todo
@@ -200,58 +354,80 @@ def apply_event(event)
when 'MARKER_RECORDED'
state_machine.complete
- handle_marker(event.id, event.attributes.marker_name, from_details_payloads(event.attributes.details['data']))
+ handle_marker(event.id, event.attributes.marker_name, converter.from_details_payloads(event.attributes.details['data']))
when 'WORKFLOW_EXECUTION_SIGNALED'
- dispatch(target, 'signaled', event.attributes.signal_name, from_signal_payloads(event.attributes.input))
+ # relies on Signal#== for matching in Dispatcher
+ signal_target = Signal.new(event.attributes.signal_name)
+ dispatch(signal_target, 'signaled', event.attributes.signal_name,
+ converter.from_signal_payloads(event.attributes.input))
when 'WORKFLOW_EXECUTION_TERMINATED'
# todo
when 'WORKFLOW_EXECUTION_CONTINUED_AS_NEW'
- # todo
+ # should only be triggered in query execution and replay testing
+ discard_command(history_target)
when 'START_CHILD_WORKFLOW_EXECUTION_INITIATED'
state_machine.schedule
- discard_command(target)
+ discard_command(history_target)
when 'START_CHILD_WORKFLOW_EXECUTION_FAILED'
state_machine.fail
- dispatch(target, 'failed', 'StandardError', from_payloads(event.attributes.cause))
-
+ error = Temporal::Workflow::Errors.generate_error_for_child_workflow_start(
+ event.attributes.cause,
+ event.attributes.workflow_id
+ )
+ dispatch(history_target, 'failed', error)
when 'CHILD_WORKFLOW_EXECUTION_STARTED'
+ dispatch(history_target, 'started', event.attributes.workflow_execution)
state_machine.start
when 'CHILD_WORKFLOW_EXECUTION_COMPLETED'
state_machine.complete
- dispatch(target, 'completed', from_result_payloads(event.attributes.result))
+ dispatch(history_target, 'completed', converter.from_result_payloads(event.attributes.result))
when 'CHILD_WORKFLOW_EXECUTION_FAILED'
state_machine.fail
- dispatch(target, 'failed', Temporal::Workflow::Errors.generate_error(event.attributes.failure))
+ dispatch(history_target, 'failed', Temporal::Workflow::Errors.generate_error(event.attributes.failure, converter))
when 'CHILD_WORKFLOW_EXECUTION_CANCELED'
state_machine.cancel
- dispatch(target, 'failed', Temporal::Workflow::Errors.generate_error(event.attributes.failure))
+ dispatch(history_target, 'failed', Temporal::Workflow::Errors.generate_error(event.attributes.failure, converter))
when 'CHILD_WORKFLOW_EXECUTION_TIMED_OUT'
state_machine.time_out
- dispatch(target, 'failed', Temporal::Workflow::Errors.generate_error(event.attributes.failure))
+ dispatch(history_target, 'failed',
+ ChildWorkflowTimeoutError.new('The child workflow timed out before succeeding'))
when 'CHILD_WORKFLOW_EXECUTION_TERMINATED'
- # todo
-
+ state_machine.terminated
+ dispatch(history_target, 'failed', ChildWorkflowTerminatedError.new('The child workflow was terminated'))
when 'SIGNAL_EXTERNAL_WORKFLOW_EXECUTION_INITIATED'
- # todo
+ # Temporal Server will try to Signal the targeted Workflow
+ # Contains the Signal name, as well as a Signal payload
+ # The workflow that sends the signal creates this event in its log; the
+ # receiving workflow records WORKFLOW_EXECUTION_SIGNALED on reception
+ state_machine.start
+ discard_command(history_target)
when 'SIGNAL_EXTERNAL_WORKFLOW_EXECUTION_FAILED'
- # todo
+ # Temporal Server cannot Signal the targeted Workflow
+ # Usually because the Workflow could not be found
+ state_machine.fail
+ dispatch(history_target, 'failed', 'StandardError', event.attributes.cause)
when 'EXTERNAL_WORKFLOW_EXECUTION_SIGNALED'
- # todo
+ # Temporal Server has successfully Signaled the targeted Workflow
+ # Return the result to the Future waiting on this
+ state_machine.complete
+ dispatch(history_target, 'completed')
when 'UPSERT_WORKFLOW_SEARCH_ATTRIBUTES'
- # todo
+ search_attributes.merge!(converter.from_payload_map(event.attributes.search_attributes&.indexed_fields || {}))
+ # no need to track state; this is just a synchronous API call.
+ discard_command(history_target)
else
raise UnsupportedEvent, event.type
@@ -271,31 +447,46 @@ def event_target_from(command_id, command)
History::EventTarget::TIMER_TYPE
when Command::CancelTimer
History::EventTarget::CANCEL_TIMER_REQUEST_TYPE
- when Command::CompleteWorkflow, Command::FailWorkflow
- History::EventTarget::WORKFLOW_TYPE
+ when Command::CompleteWorkflow
+ History::EventTarget::COMPLETE_WORKFLOW_TYPE
+ when Command::ContinueAsNew
+ History::EventTarget::CONTINUE_AS_NEW_WORKFLOW_TYPE
+ when Command::FailWorkflow
+ History::EventTarget::FAIL_WORKFLOW_TYPE
when Command::StartChildWorkflow
History::EventTarget::CHILD_WORKFLOW_TYPE
+ when Command::UpsertSearchAttributes
+ History::EventTarget::UPSERT_SEARCH_ATTRIBUTES_REQUEST_TYPE
+ when Command::SignalExternalWorkflow
+ History::EventTarget::EXTERNAL_WORKFLOW_TYPE
end
History::EventTarget.new(command_id, target_type)
end
- def dispatch(target, name, *attributes)
- dispatcher.dispatch(target, name, attributes)
+ def dispatch(history_target, name, *attributes)
+ dispatcher.dispatch(history_target, name, attributes)
end
- def discard_command(target)
+ NONDETERMINISM_ERROR_SUGGESTION =
+ 'Likely, either you have made a version-unsafe change to your workflow or have non-deterministic '\
+ 'behavior in your workflow. See https://docs.temporal.io/docs/java/versioning/#introduction-to-versioning.'.freeze
+
+ def discard_command(history_target)
# Pop the first command from the list, it is expected to match
- existing_command_id, existing_command = commands.shift
+ replay_command_id, replay_command = commands.shift
- if !existing_command_id
- raise NonDeterministicWorkflowError, "A command #{target} was not scheduled upon replay"
+ unless replay_command_id
+ raise NonDeterministicWorkflowError,
+ "A command in the history of previous executions, #{history_target}, was not scheduled upon replay. " + NONDETERMINISM_ERROR_SUGGESTION
end
- existing_target = event_target_from(existing_command_id, existing_command)
- if target != existing_target
- raise NonDeterministicWorkflowError, "Unexpected command #{existing_target} (expected #{target})"
- end
+ replay_target = event_target_from(replay_command_id, replay_command)
+ return unless history_target != replay_target
+
+ raise NonDeterministicWorkflowError,
+ "Unexpected command. The replaying code is issuing: #{replay_target}, "\
+ "but the history of previous executions recorded: #{history_target}. " + NONDETERMINISM_ERROR_SUGGESTION
end
def handle_marker(id, type, details)
@@ -320,7 +511,6 @@ def track_release(release_name)
schedule(Command::RecordMarker.new(name: RELEASE_MARKER, details: release_name))
end
end
-
end
end
end
diff --git a/lib/temporal/workflow/status.rb b/lib/temporal/workflow/status.rb
new file mode 100644
index 00000000..dd3875ab
--- /dev/null
+++ b/lib/temporal/workflow/status.rb
@@ -0,0 +1,24 @@
+module Temporal
+ class Workflow
+ module Status
+ RUNNING = :RUNNING
+ COMPLETED = :COMPLETED
+ FAILED = :FAILED
+ CANCELED = :CANCELED
+ TERMINATED = :TERMINATED
+ CONTINUED_AS_NEW = :CONTINUED_AS_NEW
+ TIMED_OUT = :TIMED_OUT
+
+
+ API_STATUS_MAP = {
+ WORKFLOW_EXECUTION_STATUS_RUNNING: RUNNING,
+ WORKFLOW_EXECUTION_STATUS_COMPLETED: COMPLETED,
+ WORKFLOW_EXECUTION_STATUS_FAILED: FAILED,
+ WORKFLOW_EXECUTION_STATUS_CANCELED: CANCELED,
+ WORKFLOW_EXECUTION_STATUS_TERMINATED: TERMINATED,
+ WORKFLOW_EXECUTION_STATUS_CONTINUED_AS_NEW: CONTINUED_AS_NEW,
+ WORKFLOW_EXECUTION_STATUS_TIMED_OUT: TIMED_OUT
+ }.freeze
+ end
+ end
+end
\ No newline at end of file
diff --git a/lib/temporal/workflow/task_processor.rb b/lib/temporal/workflow/task_processor.rb
index c22e2c7d..b3620ad8 100644
--- a/lib/temporal/workflow/task_processor.rb
+++ b/lib/temporal/workflow/task_processor.rb
@@ -1,58 +1,92 @@
-require 'temporal/workflow/executor'
-require 'temporal/workflow/history'
-require 'temporal/metadata'
require 'temporal/error_handler'
require 'temporal/errors'
+require 'temporal/metadata'
+require 'temporal/workflow/executor'
+require 'temporal/workflow/history'
+require 'temporal/workflow/stack_trace_tracker'
+require 'temporal/metric_keys'
module Temporal
class Workflow
class TaskProcessor
+ Query = Struct.new(:query, :converter) do
+ def query_type
+ query.query_type
+ end
+
+ def query_args
+ converter.from_query_payloads(query.query_args)
+ end
+ end
+
MAX_FAILED_ATTEMPTS = 1
+ LEGACY_QUERY_KEY = :legacy_query
- def initialize(task, namespace, workflow_lookup, middleware_chain, config)
+ def initialize(task, task_queue, namespace, workflow_lookup, middleware_chain, workflow_middleware_chain, config, binary_checksum)
@task = task
+ @task_queue = task_queue
@namespace = namespace
- @metadata = Metadata.generate(Metadata::WORKFLOW_TASK_TYPE, task, namespace)
+ @metadata = Metadata.generate_workflow_task_metadata(task, namespace)
@task_token = task.task_token
@workflow_name = task.workflow_type.name
@workflow_class = workflow_lookup.find(workflow_name)
@middleware_chain = middleware_chain
+ @workflow_middleware_chain = workflow_middleware_chain
@config = config
+ @binary_checksum = binary_checksum
end
def process
start_time = Time.now
Temporal.logger.debug("Processing Workflow task", metadata.to_h)
- Temporal.metrics.timing('workflow_task.queue_time', queue_time_ms, workflow: workflow_name)
+ Temporal.metrics.timing(Temporal::MetricKeys::WORKFLOW_TASK_QUEUE_TIME, queue_time_ms, metric_tags)
- if !workflow_class
- raise Temporal::WorkflowNotRegistered, 'Workflow is not registered with this worker'
- end
+ raise Temporal::WorkflowNotRegistered, 'Workflow is not registered with this worker' unless workflow_class
history = fetch_full_history
+ queries = parse_queries
+
+ # We only need to track the stack trace if this is a stack trace query
+ track_stack_trace = queries.values.map(&:query_type).include?(StackTraceTracker::STACK_TRACE_QUERY_NAME)
+
# TODO: For sticky workflows we need to cache the Executor instance
- executor = Workflow::Executor.new(workflow_class, history, config)
+ executor = Workflow::Executor.new(workflow_class, history, metadata, config, track_stack_trace,
+ workflow_middleware_chain)
- commands = middleware_chain.invoke(metadata) do
+ run_result = middleware_chain.invoke(metadata) do
executor.run
end
- complete_task(commands)
- rescue StandardError => error
- Temporal::ErrorHandler.handle(error, metadata: metadata)
+ query_results = executor.process_queries(queries)
- fail_task(error)
+ if legacy_query_task?
+ complete_query(query_results[LEGACY_QUERY_KEY])
+ else
+ complete_task(run_result, query_results)
+ end
+ rescue StandardError => e
+ Temporal::ErrorHandler.handle(e, config, metadata: metadata)
+
+ fail_task(e)
ensure
time_diff_ms = ((Time.now - start_time) * 1000).round
- Temporal.metrics.timing('workflow_task.latency', time_diff_ms, workflow: workflow_name)
- Temporal.logger.debug("Workflow task processed", metadata.to_h.merge(execution_time: time_diff_ms))
+ Temporal.metrics.timing(Temporal::MetricKeys::WORKFLOW_TASK_LATENCY, time_diff_ms, metric_tags)
+ Temporal.logger.debug('Workflow task processed', metadata.to_h.merge(execution_time: time_diff_ms))
+ end
+
+ def metric_tags
+ {
+ workflow: workflow_name,
+ namespace: namespace,
+ task_queue: task_queue
+ }
end
private
- attr_reader :task, :namespace, :task_token, :workflow_name, :workflow_class,
- :middleware_chain, :metadata, :config
+ attr_reader :task, :task_queue, :namespace, :task_token, :workflow_name, :workflow_class,
+ :middleware_chain, :workflow_middleware_chain, :metadata, :config, :binary_checksum
def connection
@connection ||= Temporal::Connection.generate(config.for_connection)
@@ -67,34 +101,66 @@ def queue_time_ms
def fetch_full_history
events = task.history.events.to_a
next_page_token = task.next_page_token
-
- while !next_page_token.empty? do
+ until next_page_token.empty?
response = connection.get_workflow_execution_history(
namespace: namespace,
workflow_id: task.workflow_execution.workflow_id,
run_id: task.workflow_execution.run_id,
next_page_token: next_page_token
)
-
- if response.history.events.empty?
- raise Temporal::UnexpectedResponse, 'Received empty history page'
- end
-
events += response.history.events.to_a
+
next_page_token = response.next_page_token
end
Workflow::History.new(events)
end
- def complete_task(commands)
- Temporal.logger.info("Workflow task completed", metadata.to_h)
+ def legacy_query_task?
+ !!task.query
+ end
+
+ def parse_queries
+ # Support for deprecated query style
+ if legacy_query_task?
+ { LEGACY_QUERY_KEY => Query.new(task.query, config.converter) }
+ else
+ task.queries.each_with_object({}) do |(query_id, query), result|
+ result[query_id] = Query.new(query, config.converter)
+ end
+ end
+ end
+
+ def complete_task(run_result, query_results)
+ Temporal.logger.info('Workflow task completed', metadata.to_h)
+
+ connection.respond_workflow_task_completed(
+ namespace: namespace,
+ task_token: task_token,
+ commands: run_result.commands,
+ binary_checksum: binary_checksum,
+ query_results: query_results,
+ new_sdk_flags_used: run_result.new_sdk_flags_used
+ )
+ end
+
+ def complete_query(result)
+ Temporal.logger.info('Workflow Query task completed', metadata.to_h)
+
+ connection.respond_query_task_completed(
+ namespace: namespace,
+ task_token: task_token,
+ query_result: result
+ )
+ rescue StandardError => e
+ Temporal.logger.error('Unable to complete a query', metadata.to_h.merge(error: e.inspect))
- connection.respond_workflow_task_completed(task_token: task_token, commands: commands)
+ Temporal::ErrorHandler.handle(e, config, metadata: metadata)
end
def fail_task(error)
- Temporal.logger.error("Workflow task failed", metadata.to_h.merge(error: error.inspect))
+ Temporal.metrics.increment(Temporal::MetricKeys::WORKFLOW_TASK_EXECUTION_FAILED, metric_tags)
+ Temporal.logger.error('Workflow task failed', metadata.to_h.merge(error: error.inspect))
Temporal.logger.debug(error.backtrace.join("\n"))
# Only fail the workflow task on the first attempt. Subsequent failures of the same workflow task
@@ -103,14 +169,16 @@ def fail_task(error)
return if task.attempt > MAX_FAILED_ATTEMPTS
connection.respond_workflow_task_failed(
+ namespace: namespace,
task_token: task_token,
- cause: Temporal::Api::Enums::V1::WorkflowTaskFailedCause::WORKFLOW_TASK_FAILED_CAUSE_WORKFLOW_WORKER_UNHANDLED_FAILURE,
- exception: error
+ cause: Temporalio::Api::Enums::V1::WorkflowTaskFailedCause::WORKFLOW_TASK_FAILED_CAUSE_WORKFLOW_WORKER_UNHANDLED_FAILURE,
+ exception: error,
+ binary_checksum: binary_checksum
)
- rescue StandardError => error
- Temporal.logger.error("Unable to fail Workflow task", metadata.to_h.merge(error: error.inspect))
+ rescue StandardError => e
+ Temporal.logger.error('Unable to fail Workflow task', metadata.to_h.merge(error: e.inspect))
- Temporal::ErrorHandler.handle(error, metadata: metadata)
+ Temporal::ErrorHandler.handle(e, config, metadata: metadata)
end
end
end
diff --git a/proto b/proto
index 4c2f6a28..ae312b07 160000
--- a/proto
+++ b/proto
@@ -1 +1 @@
-Subproject commit 4c2f6a281fa3fde8b0a24447de3e0d0f47d230b4
+Subproject commit ae312b0724003957b96fb966e3fe25a02abaade4
diff --git a/rbi/temporal-ruby.rbi b/rbi/temporal-ruby.rbi
index b7da9d12..cdcda078 100644
--- a/rbi/temporal-ruby.rbi
+++ b/rbi/temporal-ruby.rbi
@@ -39,5 +39,5 @@ module Temporal
class FeatureVersionNotSupportedFailure; end
class NamespaceAlreadyExistsFailure; end
class CancellationAlreadyRequestedFailure; end
- class QueryFailedFailure; end
+ class QueryFailed; end
end
diff --git a/spec/config/coveralls.rb b/spec/config/coveralls.rb
deleted file mode 100644
index fe39ae25..00000000
--- a/spec/config/coveralls.rb
+++ /dev/null
@@ -1,3 +0,0 @@
-require 'coveralls'
-
-Coveralls.wear!
diff --git a/spec/config/temporal.rb b/spec/config/temporal.rb
deleted file mode 100644
index 0d868ffe..00000000
--- a/spec/config/temporal.rb
+++ /dev/null
@@ -1,5 +0,0 @@
-RSpec.configure do |config|
- config.before(:each) do
- Temporal.configuration.error_handlers.clear
- end
-end
\ No newline at end of file
diff --git a/spec/config/test_converter.rb b/spec/config/test_converter.rb
new file mode 100644
index 00000000..6cb9fce5
--- /dev/null
+++ b/spec/config/test_converter.rb
@@ -0,0 +1,8 @@
+require 'temporal/converter_wrapper'
+
+# This is a barebones default converter that can be used in tests
+# where default conversion behaviour is expected
+TEST_CONVERTER = Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+).freeze
diff --git a/spec/fabricators/activity_metadata_fabricator.rb b/spec/fabricators/activity_metadata_fabricator.rb
index 06409da6..34ee31a6 100644
--- a/spec/fabricators/activity_metadata_fabricator.rb
+++ b/spec/fabricators/activity_metadata_fabricator.rb
@@ -11,4 +11,7 @@
workflow_name 'TestWorkflow'
headers { {} }
heartbeat_details nil
+ scheduled_at { Time.now }
+ current_attempt_scheduled_at { Time.now }
+ heartbeat_timeout 0
end
diff --git a/spec/fabricators/grpc/activity_task_fabricator.rb b/spec/fabricators/grpc/activity_task_fabricator.rb
index b6fc43fc..82e0886f 100644
--- a/spec/fabricators/grpc/activity_task_fabricator.rb
+++ b/spec/fabricators/grpc/activity_task_fabricator.rb
@@ -1,20 +1,23 @@
require 'securerandom'
-Fabricator(:api_activity_task, from: Temporal::Api::WorkflowService::V1::PollActivityTaskQueueResponse) do
+Fabricator(:api_activity_task, from: Temporalio::Api::WorkflowService::V1::PollActivityTaskQueueResponse) do
transient :task_token, :activity_name, :headers
activity_id { SecureRandom.uuid }
task_token { |attrs| attrs[:task_token] || SecureRandom.uuid }
activity_type { Fabricate(:api_activity_type) }
- input { Temporal.configuration.converter.to_payloads(nil) }
+ input { TEST_CONVERTER.to_payloads(nil) }
workflow_type { Fabricate(:api_workflow_type) }
workflow_execution { Fabricate(:api_workflow_execution) }
current_attempt_scheduled_time { Google::Protobuf::Timestamp.new.tap { |t| t.from_time(Time.now) } }
started_time { Google::Protobuf::Timestamp.new.tap { |t| t.from_time(Time.now) } }
+ scheduled_time { Google::Protobuf::Timestamp.new.tap { |t| t.from_time(Time.now) } }
+ # NOTE: current_attempt_scheduled_time is already defined above; duplicate removed
header do |attrs|
fields = (attrs[:headers] || {}).each_with_object({}) do |(field, value), h|
- h[field] = Temporal.configuration.converter.to_payload(value)
+ h[field] = TEST_CONVERTER.to_payload(value)
end
- Temporal::Api::Common::V1::Header.new(fields: fields)
+ Temporalio::Api::Common::V1::Header.new(fields: fields)
end
+ heartbeat_timeout { Google::Protobuf::Duration.new }
end
diff --git a/spec/fabricators/grpc/activity_type_fabricator.rb b/spec/fabricators/grpc/activity_type_fabricator.rb
index eace3c89..b1e232ff 100644
--- a/spec/fabricators/grpc/activity_type_fabricator.rb
+++ b/spec/fabricators/grpc/activity_type_fabricator.rb
@@ -1,3 +1,3 @@
-Fabricator(:api_activity_type, from: Temporal::Api::Common::V1::ActivityType) do
+Fabricator(:api_activity_type, from: Temporalio::Api::Common::V1::ActivityType) do
name 'TestActivity'
end
diff --git a/spec/fabricators/grpc/application_failure_fabricator.rb b/spec/fabricators/grpc/application_failure_fabricator.rb
index edf90c82..95089cb7 100644
--- a/spec/fabricators/grpc/application_failure_fabricator.rb
+++ b/spec/fabricators/grpc/application_failure_fabricator.rb
@@ -1,16 +1,12 @@
-require 'temporal/concerns/payloads'
-class TestDeserializer
- include Temporal::Concerns::Payloads
-end
# Simulates Temporal::Connection::Serializer::Failure
-Fabricator(:api_application_failure, from: Temporal::Api::Failure::V1::Failure) do
+Fabricator(:api_application_failure, from: Temporalio::Api::Failure::V1::Failure) do
transient :error_class, :backtrace
message { |attrs| attrs[:message] }
stack_trace { |attrs| attrs[:backtrace].join("\n") }
application_failure_info do |attrs|
- Temporal::Api::Failure::V1::ApplicationFailureInfo.new(
+ Temporalio::Api::Failure::V1::ApplicationFailureInfo.new(
type: attrs[:error_class],
- details: TestDeserializer.new.to_details_payloads(attrs[:message]),
+ details: TEST_CONVERTER.to_details_payloads(attrs[:message]),
)
end
end
diff --git a/spec/fabricators/grpc/get_system_info_fabricator.rb b/spec/fabricators/grpc/get_system_info_fabricator.rb
new file mode 100644
index 00000000..5b35cb33
--- /dev/null
+++ b/spec/fabricators/grpc/get_system_info_fabricator.rb
@@ -0,0 +1,10 @@
+Fabricator(:api_get_system_info, from: Temporalio::Api::WorkflowService::V1::GetSystemInfoResponse) do
+ transient :sdk_metadata_capability
+
+ server_version 'test-7.8.9'
+ capabilities do |attrs|
+ Temporalio::Api::WorkflowService::V1::GetSystemInfoResponse::Capabilities.new(
+ sdk_metadata: attrs.fetch(:sdk_metadata_capability, true)
+ )
+ end
+end
diff --git a/spec/fabricators/grpc/header_fabricator.rb b/spec/fabricators/grpc/header_fabricator.rb
index c70c886d..c641e0c2 100644
--- a/spec/fabricators/grpc/header_fabricator.rb
+++ b/spec/fabricators/grpc/header_fabricator.rb
@@ -1,3 +1,3 @@
-Fabricator(:api_header, from: Temporal::Api::Common::V1::Header) do
- fields { Google::Protobuf::Map.new(:string, :message, Temporal::Api::Common::V1::Payload) }
+Fabricator(:api_header, from: Temporalio::Api::Common::V1::Header) do
+ fields { Google::Protobuf::Map.new(:string, :message, Temporalio::Api::Common::V1::Payload) }
end
diff --git a/spec/fabricators/grpc/history_event_fabricator.rb b/spec/fabricators/grpc/history_event_fabricator.rb
index 0d7e9e48..ad9a55e8 100644
--- a/spec/fabricators/grpc/history_event_fabricator.rb
+++ b/spec/fabricators/grpc/history_event_fabricator.rb
@@ -1,14 +1,20 @@
require 'securerandom'
-Fabricator(:api_history_event, from: Temporal::Api::History::V1::HistoryEvent) do
+Fabricator(:api_history_event, from: Temporalio::Api::History::V1::HistoryEvent) do
event_id { 1 }
event_time { Time.now }
end
Fabricator(:api_workflow_execution_started_event, from: :api_history_event) do
- event_type { Temporal::Api::Enums::V1::EventType::EVENT_TYPE_WORKFLOW_EXECUTION_STARTED }
- workflow_execution_started_event_attributes do
- Temporal::Api::History::V1::WorkflowExecutionStartedEventAttributes.new(
+ transient :headers, :search_attributes
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_WORKFLOW_EXECUTION_STARTED }
+ event_time { Time.now }
+ workflow_execution_started_event_attributes do |attrs|
+ header_fields = TEST_CONVERTER.to_payload_map(attrs[:headers] || {})
+ header = Temporalio::Api::Common::V1::Header.new(fields: header_fields)
+ indexed_fields = attrs[:search_attributes] ? TEST_CONVERTER.to_payload_map(attrs[:search_attributes]) : nil
+
+ Temporalio::Api::History::V1::WorkflowExecutionStartedEventAttributes.new(
workflow_type: Fabricate(:api_workflow_type),
task_queue: Fabricate(:api_task_queue),
input: nil,
@@ -19,15 +25,18 @@
first_execution_run_id: SecureRandom.uuid,
retry_policy: nil,
attempt: 0,
- header: Fabricate(:api_header)
+ header: header,
+ search_attributes: Temporalio::Api::Common::V1::SearchAttributes.new(
+ indexed_fields: indexed_fields
+ )
)
end
end
Fabricator(:api_workflow_execution_completed_event, from: :api_history_event) do
- event_type { Temporal::Api::Enums::V1::EventType::EVENT_TYPE_WORKFLOW_EXECUTION_COMPLETED }
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_WORKFLOW_EXECUTION_COMPLETED }
workflow_execution_completed_event_attributes do |attrs|
- Temporal::Api::History::V1::WorkflowExecutionCompletedEventAttributes.new(
+ Temporalio::Api::History::V1::WorkflowExecutionCompletedEventAttributes.new(
result: nil,
workflow_task_completed_event_id: attrs[:event_id] - 1
)
@@ -35,9 +44,9 @@
end
Fabricator(:api_workflow_task_scheduled_event, from: :api_history_event) do
- event_type { Temporal::Api::Enums::V1::EventType::EVENT_TYPE_WORKFLOW_TASK_SCHEDULED }
- workflow_task_scheduled_event_attributes do |attrs|
- Temporal::Api::History::V1::WorkflowTaskScheduledEventAttributes.new(
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_WORKFLOW_TASK_SCHEDULED }
+ workflow_task_scheduled_event_attributes do |_attrs|
+ Temporalio::Api::History::V1::WorkflowTaskScheduledEventAttributes.new(
task_queue: Fabricate(:api_task_queue),
start_to_close_timeout: 15,
attempt: 0
@@ -46,44 +55,51 @@
end
Fabricator(:api_workflow_task_started_event, from: :api_history_event) do
- event_type { Temporal::Api::Enums::V1::EventType::EVENT_TYPE_WORKFLOW_TASK_STARTED }
+ transient :history_size_bytes, :suggest_continue_as_new
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_WORKFLOW_TASK_STARTED }
workflow_task_started_event_attributes do |attrs|
- Temporal::Api::History::V1::WorkflowTaskStartedEventAttributes.new(
+ Temporalio::Api::History::V1::WorkflowTaskStartedEventAttributes.new(
scheduled_event_id: attrs[:event_id] - 1,
identity: 'test-worker@test-host',
- request_id: SecureRandom.uuid
+ request_id: SecureRandom.uuid,
+ history_size_bytes: attrs[:history_size_bytes],
+ suggest_continue_as_new: attrs[:suggest_continue_as_new]
)
end
end
Fabricator(:api_workflow_task_completed_event, from: :api_history_event) do
- event_type { Temporal::Api::Enums::V1::EventType::EVENT_TYPE_WORKFLOW_TASK_COMPLETED }
+ transient :sdk_flags
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_WORKFLOW_TASK_COMPLETED }
workflow_task_completed_event_attributes do |attrs|
- Temporal::Api::History::V1::WorkflowTaskCompletedEventAttributes.new(
+ Temporalio::Api::History::V1::WorkflowTaskCompletedEventAttributes.new(
scheduled_event_id: attrs[:event_id] - 2,
started_event_id: attrs[:event_id] - 1,
- identity: 'test-worker@test-host'
+ identity: 'test-worker@test-host',
+ binary_checksum: 'v1.0.0',
+ sdk_metadata: Temporalio::Api::Sdk::V1::WorkflowTaskCompletedMetadata.new(
+ lang_used_flags: attrs[:sdk_flags] || []
+ )
)
end
end
Fabricator(:api_activity_task_scheduled_event, from: :api_history_event) do
- event_type { Temporal::Api::Enums::V1::EventType::EVENT_TYPE_ACTIVITY_TASK_SCHEDULED }
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_ACTIVITY_TASK_SCHEDULED }
activity_task_scheduled_event_attributes do |attrs|
- Temporal::Api::History::V1::ActivityTaskScheduledEventAttributes.new(
+ Temporalio::Api::History::V1::ActivityTaskScheduledEventAttributes.new(
activity_id: attrs[:event_id].to_s,
- activity_type: Temporal::Api::Common::V1::ActivityType.new(name: 'TestActivity'),
+ activity_type: Temporalio::Api::Common::V1::ActivityType.new(name: 'TestActivity'),
workflow_task_completed_event_id: attrs[:event_id] - 1,
- namespace: 'test-namespace',
task_queue: Fabricate(:api_task_queue)
)
end
end
Fabricator(:api_activity_task_started_event, from: :api_history_event) do
- event_type { Temporal::Api::Enums::V1::EventType::EVENT_TYPE_ACTIVITY_TASK_STARTED }
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_ACTIVITY_TASK_STARTED }
activity_task_started_event_attributes do |attrs|
- Temporal::Api::History::V1::ActivityTaskStartedEventAttributes.new(
+ Temporalio::Api::History::V1::ActivityTaskStartedEventAttributes.new(
scheduled_event_id: attrs[:event_id] - 1,
identity: 'test-worker@test-host',
request_id: SecureRandom.uuid
@@ -92,9 +108,9 @@
end
Fabricator(:api_activity_task_completed_event, from: :api_history_event) do
- event_type { Temporal::Api::Enums::V1::EventType::EVENT_TYPE_ACTIVITY_TASK_COMPLETED }
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_ACTIVITY_TASK_COMPLETED }
activity_task_completed_event_attributes do |attrs|
- Temporal::Api::History::V1::ActivityTaskCompletedEventAttributes.new(
+ Temporalio::Api::History::V1::ActivityTaskCompletedEventAttributes.new(
result: nil,
scheduled_event_id: attrs[:event_id] - 2,
started_event_id: attrs[:event_id] - 1,
@@ -104,10 +120,10 @@
end
Fabricator(:api_activity_task_failed_event, from: :api_history_event) do
- event_type { Temporal::Api::Enums::V1::EventType::EVENT_TYPE_ACTIVITY_TASK_FAILED }
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_ACTIVITY_TASK_FAILED }
activity_task_failed_event_attributes do |attrs|
- Temporal::Api::History::V1::ActivityTaskFailedEventAttributes.new(
- failure: Temporal::Api::Failure::V1::Failure.new(message: "Activity failed"),
+ Temporalio::Api::History::V1::ActivityTaskFailedEventAttributes.new(
+ failure: Temporalio::Api::Failure::V1::Failure.new(message: 'Activity failed'),
scheduled_event_id: attrs[:event_id] - 2,
started_event_id: attrs[:event_id] - 1,
identity: 'test-worker@test-host'
@@ -115,10 +131,32 @@
end
end
+Fabricator(:api_activity_task_canceled_event, from: :api_history_event) do
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_ACTIVITY_TASK_CANCELED }
+ activity_task_canceled_event_attributes do |attrs|
+ Temporalio::Api::History::V1::ActivityTaskCanceledEventAttributes.new(
+ details: TEST_CONVERTER.to_details_payloads('ACTIVITY_ID_NOT_STARTED'),
+ scheduled_event_id: attrs[:event_id] - 2,
+ started_event_id: nil,
+ identity: 'test-worker@test-host'
+ )
+ end
+end
+
+Fabricator(:api_activity_task_cancel_requested_event, from: :api_history_event) do
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_ACTIVITY_TASK_CANCEL_REQUESTED }
+ activity_task_cancel_requested_event_attributes do |attrs|
+ Temporalio::Api::History::V1::ActivityTaskCancelRequestedEventAttributes.new(
+ scheduled_event_id: attrs[:event_id] - 1,
+ workflow_task_completed_event_id: attrs[:event_id] - 2
+ )
+ end
+end
+
Fabricator(:api_timer_started_event, from: :api_history_event) do
- event_type { Temporal::Api::Enums::V1::EventType::EVENT_TYPE_TIMER_STARTED }
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_TIMER_STARTED }
timer_started_event_attributes do |attrs|
- Temporal::Api::History::V1::TimerStartedEventAttributes.new(
+ Temporalio::Api::History::V1::TimerStartedEventAttributes.new(
timer_id: attrs[:event_id].to_s,
start_to_fire_timeout: 10,
workflow_task_completed_event_id: attrs[:event_id] - 1
@@ -127,9 +165,9 @@
end
Fabricator(:api_timer_fired_event, from: :api_history_event) do
- event_type { Temporal::Api::Enums::V1::EventType::EVENT_TYPE_TIMER_FIRED }
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_TIMER_FIRED }
timer_fired_event_attributes do |attrs|
- Temporal::Api::History::V1::TimerFiredEventAttributes.new(
+ Temporalio::Api::History::V1::TimerFiredEventAttributes.new(
timer_id: attrs[:event_id].to_s,
started_event_id: attrs[:event_id] - 4
)
@@ -137,9 +175,9 @@
end
Fabricator(:api_timer_canceled_event, from: :api_history_event) do
- event_type { Temporal::Api::Enums::V1::EventType::EVENT_TYPE_TIMER_CANCELED }
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_TIMER_CANCELED }
timer_canceled_event_attributes do |attrs|
- Temporal::Api::History::V1::TimerCanceledEventAttributes.new(
+ Temporalio::Api::History::V1::TimerCanceledEventAttributes.new(
timer_id: attrs[:event_id].to_s,
started_event_id: attrs[:event_id] - 4,
workflow_task_completed_event_id: attrs[:event_id] - 1,
@@ -147,3 +185,39 @@
)
end
end
+
+Fabricator(:api_upsert_search_attributes_event, from: :api_history_event) do
+ transient :search_attributes
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_UPSERT_WORKFLOW_SEARCH_ATTRIBUTES }
+ upsert_workflow_search_attributes_event_attributes do |attrs|
+ indexed_fields = attrs[:search_attributes] ? TEST_CONVERTER.to_payload_map(attrs[:search_attributes]) : nil
+ Temporalio::Api::History::V1::UpsertWorkflowSearchAttributesEventAttributes.new(
+ workflow_task_completed_event_id: attrs[:event_id] - 1,
+ search_attributes: Temporalio::Api::Common::V1::SearchAttributes.new(
+ indexed_fields: indexed_fields
+ )
+ )
+ end
+end
+
+Fabricator(:api_marker_recorded_event, from: :api_history_event) do
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_MARKER_RECORDED }
+ marker_recorded_event_attributes do |attrs|
+ Temporalio::Api::History::V1::MarkerRecordedEventAttributes.new(
+ workflow_task_completed_event_id: attrs[:event_id] - 1,
+ marker_name: 'SIDE_EFFECT',
+ details: TEST_CONVERTER.to_payload_map({})
+ )
+ end
+end
+
+Fabricator(:api_workflow_execution_signaled_event, from: :api_history_event) do
+ event_type { Temporalio::Api::Enums::V1::EventType::EVENT_TYPE_WORKFLOW_EXECUTION_SIGNALED }
+ workflow_execution_signaled_event_attributes do
+ Temporalio::Api::History::V1::WorkflowExecutionSignaledEventAttributes.new(
+ signal_name: 'a_signal',
+ input: nil,
+ identity: 'test-worker@test-host'
+ )
+ end
+end
diff --git a/spec/fabricators/grpc/memo_fabricator.rb b/spec/fabricators/grpc/memo_fabricator.rb
new file mode 100644
index 00000000..cf499c8a
--- /dev/null
+++ b/spec/fabricators/grpc/memo_fabricator.rb
@@ -0,0 +1,7 @@
+Fabricator(:memo, from: Temporalio::Api::Common::V1::Memo) do
+ fields do
+ Google::Protobuf::Map.new(:string, :message, Temporalio::Api::Common::V1::Payload).tap do |m|
+ m['foo'] = TEST_CONVERTER.to_payload('bar')
+ end
+ end
+end
diff --git a/spec/fabricators/grpc/payload_fabricator.rb b/spec/fabricators/grpc/payload_fabricator.rb
index d6476915..9312da42 100644
--- a/spec/fabricators/grpc/payload_fabricator.rb
+++ b/spec/fabricators/grpc/payload_fabricator.rb
@@ -1,3 +1,23 @@
-Fabricator(:api_payload, from: Temporal::Api::Common::V1::Payload) do
+Fabricator(:api_payload, from: Temporalio::Api::Common::V1::Payload) do
metadata { Google::Protobuf::Map.new(:string, :bytes) }
end
+
+Fabricator(:api_payload_nil, from: :api_payload) do
+ metadata do
+ Google::Protobuf::Map.new(:string, :bytes).tap do |m|
+ m['encoding'] = Temporal::Connection::Converter::Payload::Nil::ENCODING
+ end
+ end
+end
+
+Fabricator(:api_payload_bytes, from: :api_payload) do
+ transient :bytes
+
+ metadata do
+ Google::Protobuf::Map.new(:string, :bytes).tap do |m|
+ m['encoding'] = Temporal::Connection::Converter::Payload::Bytes::ENCODING
+ end
+ end
+
+ data { |attrs| attrs.fetch(:bytes, 'foobar') }
+end
diff --git a/spec/fabricators/grpc/payloads_fabricator.rb b/spec/fabricators/grpc/payloads_fabricator.rb
new file mode 100644
index 00000000..a8f3aff0
--- /dev/null
+++ b/spec/fabricators/grpc/payloads_fabricator.rb
@@ -0,0 +1,9 @@
+Fabricator(:api_payloads, from: Temporalio::Api::Common::V1::Payloads) do
+ transient :payloads_array
+
+ payloads do |attrs|
+ Google::Protobuf::RepeatedField.new(:message, Temporalio::Api::Common::V1::Payload).tap do |m|
+ m.concat(Array(attrs.fetch(:payloads_array, Fabricate(:api_payload))))
+ end
+ end
+end
diff --git a/spec/fabricators/grpc/record_activity_heartbeat_fabricator.rb b/spec/fabricators/grpc/record_activity_heartbeat_fabricator.rb
new file mode 100644
index 00000000..e1b6eacf
--- /dev/null
+++ b/spec/fabricators/grpc/record_activity_heartbeat_fabricator.rb
@@ -0,0 +1,3 @@
+Fabricator(:api_record_activity_heartbeat_response, from: Temporalio::Api::WorkflowService::V1::RecordActivityTaskHeartbeatResponse) do
+ cancel_requested false
+end
diff --git a/spec/fabricators/grpc/search_attributes_fabricator.rb b/spec/fabricators/grpc/search_attributes_fabricator.rb
new file mode 100644
index 00000000..1e98516e
--- /dev/null
+++ b/spec/fabricators/grpc/search_attributes_fabricator.rb
@@ -0,0 +1,7 @@
+Fabricator(:search_attributes, from: Temporalio::Api::Common::V1::SearchAttributes) do
+ indexed_fields do
+ Google::Protobuf::Map.new(:string, :message, Temporalio::Api::Common::V1::Payload).tap do |m|
+ m['foo'] = TEST_CONVERTER.to_payload('bar')
+ end
+ end
+end
diff --git a/spec/fabricators/grpc/task_queue_fabricator.rb b/spec/fabricators/grpc/task_queue_fabricator.rb
index 635f7aff..761ef867 100644
--- a/spec/fabricators/grpc/task_queue_fabricator.rb
+++ b/spec/fabricators/grpc/task_queue_fabricator.rb
@@ -1,3 +1,3 @@
-Fabricator(:api_task_queue, from: Temporal::Api::TaskQueue::V1::TaskQueue) do
+Fabricator(:api_task_queue, from: Temporalio::Api::TaskQueue::V1::TaskQueue) do
name 'test-task-queue'
end
diff --git a/spec/fabricators/grpc/workflow_execution_fabricator.rb b/spec/fabricators/grpc/workflow_execution_fabricator.rb
index 385cd508..32150daa 100644
--- a/spec/fabricators/grpc/workflow_execution_fabricator.rb
+++ b/spec/fabricators/grpc/workflow_execution_fabricator.rb
@@ -1,6 +1,6 @@
require 'securerandom'
-Fabricator(:api_workflow_execution, from: Temporal::Api::Common::V1::WorkflowExecution) do
+Fabricator(:api_workflow_execution, from: Temporalio::Api::Common::V1::WorkflowExecution) do
run_id { SecureRandom.uuid }
workflow_id { SecureRandom.uuid }
end
diff --git a/spec/fabricators/grpc/workflow_execution_info_fabricator.rb b/spec/fabricators/grpc/workflow_execution_info_fabricator.rb
index 296bde87..efba5cea 100644
--- a/spec/fabricators/grpc/workflow_execution_info_fabricator.rb
+++ b/spec/fabricators/grpc/workflow_execution_info_fabricator.rb
@@ -1,8 +1,12 @@
-Fabricator(:api_workflow_execution_info, from: Temporal::Api::Workflow::V1::WorkflowExecutionInfo) do
- execution { Fabricate(:api_workflow_execution) }
- type { Fabricate(:api_workflow_type) }
+Fabricator(:api_workflow_execution_info, from: Temporalio::Api::Workflow::V1::WorkflowExecutionInfo) do
+ transient :workflow_id, :workflow
+
+ execution { |attrs| Fabricate(:api_workflow_execution, workflow_id: attrs[:workflow_id]) }
+ type { |attrs| Fabricate(:api_workflow_type, name: attrs[:workflow]) }
start_time { Google::Protobuf::Timestamp.new.tap { |t| t.from_time(Time.now) } }
close_time { Google::Protobuf::Timestamp.new.tap { |t| t.from_time(Time.now) } }
- status { Temporal::Api::Enums::V1::WorkflowExecutionStatus::WORKFLOW_EXECUTION_STATUS_COMPLETED }
+ status { Temporalio::Api::Enums::V1::WorkflowExecutionStatus::WORKFLOW_EXECUTION_STATUS_COMPLETED }
history_length { rand(100) }
+ memo { Fabricate(:memo) }
+ search_attributes { Fabricate(:search_attributes) }
end
diff --git a/spec/fabricators/grpc/workflow_execution_started_event_attributes_fabricator.rb b/spec/fabricators/grpc/workflow_execution_started_event_attributes_fabricator.rb
index a3e72609..0c1449fe 100644
--- a/spec/fabricators/grpc/workflow_execution_started_event_attributes_fabricator.rb
+++ b/spec/fabricators/grpc/workflow_execution_started_event_attributes_fabricator.rb
@@ -2,17 +2,18 @@
Fabricator(
:api_workflow_execution_started_event_attributes,
- from: Temporal::Api::History::V1::WorkflowExecutionStartedEventAttributes
+ from: Temporalio::Api::History::V1::WorkflowExecutionStartedEventAttributes
) do
transient :headers
workflow_type { Fabricate(:api_workflow_type) }
original_execution_run_id { SecureRandom.uuid }
attempt 1
+ task_queue { Fabricate(:api_task_queue) }
header do |attrs|
fields = (attrs[:headers] || {}).each_with_object({}) do |(field, value), h|
- h[field] = Temporal.configuration.converter.to_payload(value)
+ h[field] = TEST_CONVERTER.to_payload(value)
end
- Temporal::Api::Common::V1::Header.new(fields: fields)
+ Temporalio::Api::Common::V1::Header.new(fields: fields)
end
end
diff --git a/spec/fabricators/grpc/workflow_query_fabricator.rb b/spec/fabricators/grpc/workflow_query_fabricator.rb
new file mode 100644
index 00000000..f8831d49
--- /dev/null
+++ b/spec/fabricators/grpc/workflow_query_fabricator.rb
@@ -0,0 +1,4 @@
+Fabricator(:api_workflow_query, from: Temporalio::Api::Query::V1::WorkflowQuery) do
+ query_type { 'state' }
+ query_args { TEST_CONVERTER.to_payloads(['']) }
+end
diff --git a/spec/fabricators/grpc/workflow_task_fabricator.rb b/spec/fabricators/grpc/workflow_task_fabricator.rb
index a699428a..0fd5b7c5 100644
--- a/spec/fabricators/grpc/workflow_task_fabricator.rb
+++ b/spec/fabricators/grpc/workflow_task_fabricator.rb
@@ -1,6 +1,6 @@
require 'securerandom'
-Fabricator(:api_workflow_task, from: Temporal::Api::WorkflowService::V1::PollWorkflowTaskQueueResponse) do
+Fabricator(:api_workflow_task, from: Temporalio::Api::WorkflowService::V1::PollWorkflowTaskQueueResponse) do
transient :task_token, :activity_name, :headers, :events
started_event_id { rand(100) }
@@ -9,9 +9,6 @@
workflow_execution { Fabricate(:api_workflow_execution) }
scheduled_time { Google::Protobuf::Timestamp.new.tap { |t| t.from_time(Time.now) } }
started_time { Google::Protobuf::Timestamp.new.tap { |t| t.from_time(Time.now) } }
- history { |attrs| Temporal::Api::History::V1::History.new(events: attrs[:events]) }
-end
-
-Fabricator(:api_paginated_workflow_task, from: :api_workflow_task) do
- next_page_token 'page-1'
+ history { |attrs| Temporalio::Api::History::V1::History.new(events: attrs[:events]) }
+ query { nil }
end
diff --git a/spec/fabricators/grpc/workflow_type_fabricator.rb b/spec/fabricators/grpc/workflow_type_fabricator.rb
index f72ba1de..f7661a30 100644
--- a/spec/fabricators/grpc/workflow_type_fabricator.rb
+++ b/spec/fabricators/grpc/workflow_type_fabricator.rb
@@ -1,3 +1,3 @@
-Fabricator(:api_workflow_type, from: Temporal::Api::Common::V1::WorkflowType) do
+Fabricator(:api_workflow_type, from: Temporalio::Api::Common::V1::WorkflowType) do
name 'TestWorkflow'
end
diff --git a/spec/fabricators/workflow_canceled_event_fabricator.rb b/spec/fabricators/workflow_canceled_event_fabricator.rb
index 666beb7e..57181242 100644
--- a/spec/fabricators/workflow_canceled_event_fabricator.rb
+++ b/spec/fabricators/workflow_canceled_event_fabricator.rb
@@ -1,7 +1,7 @@
-Fabricator(:workflow_canceled_event, from: Temporal::Api::History::V1::HistoryEvent) do
+Fabricator(:workflow_canceled_event, from: Temporalio::Api::History::V1::HistoryEvent) do
event_time { Google::Protobuf::Timestamp.new.tap { |t| t.from_time(Time.now) } }
event_type { :EVENT_TYPE_WORKFLOW_EXECUTION_CANCELED }
workflow_execution_canceled_event_attributes do
- Temporal::Api::History::V1::WorkflowExecutionCanceledEventAttributes.new
+ Temporalio::Api::History::V1::WorkflowExecutionCanceledEventAttributes.new
end
end
diff --git a/spec/fabricators/workflow_completed_event_fabricator.rb b/spec/fabricators/workflow_completed_event_fabricator.rb
index 02eb5226..57331f45 100644
--- a/spec/fabricators/workflow_completed_event_fabricator.rb
+++ b/spec/fabricators/workflow_completed_event_fabricator.rb
@@ -1,10 +1,10 @@
-Fabricator(:workflow_completed_event, from: Temporal::Api::History::V1::HistoryEvent) do
+Fabricator(:workflow_completed_event, from: Temporalio::Api::History::V1::HistoryEvent) do
transient :result
event_time { Google::Protobuf::Timestamp.new.tap { |t| t.from_time(Time.now) } }
event_type { :EVENT_TYPE_WORKFLOW_EXECUTION_COMPLETED }
workflow_execution_completed_event_attributes do |attrs|
- Temporal::Api::History::V1::WorkflowExecutionCompletedEventAttributes.new(
+ Temporalio::Api::History::V1::WorkflowExecutionCompletedEventAttributes.new(
result: attrs[:result]
)
end
diff --git a/spec/fabricators/workflow_execution_history_fabricator.rb b/spec/fabricators/workflow_execution_history_fabricator.rb
index bf13f965..a969eb7f 100644
--- a/spec/fabricators/workflow_execution_history_fabricator.rb
+++ b/spec/fabricators/workflow_execution_history_fabricator.rb
@@ -1,5 +1,5 @@
-Fabricator(:workflow_execution_history, from: Temporal::Api::WorkflowService::V1::GetWorkflowExecutionHistoryResponse) do
- transient :events
- history { |attrs| Temporal::Api::History::V1::History.new(events: attrs[:events]) }
- next_page_token ''
+Fabricator(:workflow_execution_history, from: Temporalio::Api::WorkflowService::V1::GetWorkflowExecutionHistoryResponse) do
+ transient :events, :_next_page_token
+ history { |attrs| Temporalio::Api::History::V1::History.new(events: attrs[:events]) }
+ next_page_token { |attrs| attrs[:_next_page_token] || '' }
end
diff --git a/spec/fabricators/workflow_metadata_fabricator.rb b/spec/fabricators/workflow_metadata_fabricator.rb
index 5a609bb8..3f3a1b7a 100644
--- a/spec/fabricators/workflow_metadata_fabricator.rb
+++ b/spec/fabricators/workflow_metadata_fabricator.rb
@@ -1,8 +1,15 @@
require 'securerandom'
Fabricator(:workflow_metadata, from: :open_struct) do
+ namespace 'test-namespace'
+ id { SecureRandom.uuid }
name 'TestWorkflow'
run_id { SecureRandom.uuid }
+ parent_id { nil }
+ parent_run_id { nil }
attempt 1
+ task_queue { Fabricate(:api_task_queue) }
+ run_started_at { Time.now }
+ memo { {} }
headers { {} }
end
diff --git a/spec/unit/lib/temporal/activity/context_spec.rb b/spec/unit/lib/temporal/activity/context_spec.rb
index 97ea4fc4..e9bc274b 100644
--- a/spec/unit/lib/temporal/activity/context_spec.rb
+++ b/spec/unit/lib/temporal/activity/context_spec.rb
@@ -1,31 +1,107 @@
require 'temporal/activity/context'
require 'temporal/metadata/activity'
+require 'temporal/scheduled_thread_pool'
describe Temporal::Activity::Context do
- let(:client) { instance_double('Temporal::Client::GRPCClient') }
+ let(:connection) { instance_double('Temporal::Connection::GRPC') }
let(:metadata_hash) { Fabricate(:activity_metadata).to_h }
let(:metadata) { Temporal::Metadata::Activity.new(**metadata_hash) }
+ let(:config) { Temporal::Configuration.new }
let(:task_token) { SecureRandom.uuid }
+ let(:heartbeat_thread_pool) { Temporal::ScheduledThreadPool.new(1, config, {}) }
+ let(:heartbeat_response) { Fabricate(:api_record_activity_heartbeat_response) }
- subject { described_class.new(client, metadata) }
+ subject { described_class.new(connection, metadata, config, heartbeat_thread_pool) }
describe '#heartbeat' do
- before { allow(client).to receive(:record_activity_task_heartbeat) }
+ before { allow(connection).to receive(:record_activity_task_heartbeat).and_return(heartbeat_response) }
it 'records heartbeat' do
subject.heartbeat
- expect(client)
+ expect(connection)
.to have_received(:record_activity_task_heartbeat)
- .with(task_token: metadata.task_token, details: nil)
+ .with(namespace: metadata.namespace, task_token: metadata.task_token, details: nil)
end
it 'records heartbeat with details' do
subject.heartbeat(foo: :bar)
- expect(client)
+ expect(connection)
.to have_received(:record_activity_task_heartbeat)
- .with(task_token: metadata.task_token, details: { foo: :bar })
+ .with(namespace: metadata.namespace, task_token: metadata.task_token, details: { foo: :bar })
+ end
+
+ context 'cancellation' do
+ let(:heartbeat_response) { Fabricate(:api_record_activity_heartbeat_response, cancel_requested: true) }
+ it 'sets when cancelled' do
+ subject.heartbeat
+ expect(subject.cancel_requested).to be(true)
+ end
+ end
+
+ context 'throttling' do
+ context 'skips after the first heartbeat' do
+ let(:metadata_hash) { Fabricate(:activity_metadata, heartbeat_timeout: 30).to_h }
+ it 'discard duplicates after first when quickly completes' do
+ 10.times do |i|
+ subject.heartbeat(iteration: i)
+ end
+
+ expect(connection)
+ .to have_received(:record_activity_task_heartbeat)
+ .with(namespace: metadata.namespace, task_token: metadata.task_token, details: { iteration: 0 })
+ .once
+ end
+ end
+
+ context 'resumes' do
+ let(:metadata_hash) { Fabricate(:activity_metadata, heartbeat_timeout: 0.1).to_h }
+ it 'more heartbeats after time passes' do
+ subject.heartbeat(iteration: 1)
+ subject.heartbeat(iteration: 2) # skipped because 3 will overwrite
+ subject.heartbeat(iteration: 3)
+ sleep 0.1
+ subject.heartbeat(iteration: 4)
+
+ # Shutdown to drain remaining threads
+ heartbeat_thread_pool.shutdown
+
+ expect(connection)
+ .to have_received(:record_activity_task_heartbeat)
+ .ordered
+ .with(namespace: metadata.namespace, task_token: metadata.task_token, details: { iteration: 1 })
+ .with(namespace: metadata.namespace, task_token: metadata.task_token, details: { iteration: 3 })
+ .with(namespace: metadata.namespace, task_token: metadata.task_token, details: { iteration: 4 })
+ end
+ end
+
+ it 'no heartbeat check scheduled when max interval is zero' do
+ config.timeouts = { max_heartbeat_throttle_interval: 0 }
+ subject.heartbeat
+
+ expect(connection)
+ .to have_received(:record_activity_task_heartbeat)
+ .with(namespace: metadata.namespace, task_token: metadata.task_token, details: nil)
+
+ expect(subject.heartbeat_check_scheduled).to be_nil
+ end
+ end
+ end
+
+ describe '#last_heartbeat_throttled' do
+ before { allow(connection).to receive(:record_activity_task_heartbeat).and_return(heartbeat_response) }
+
+ let(:metadata_hash) { Fabricate(:activity_metadata, heartbeat_timeout: 3).to_h }
+
+ it 'true when throttled, false when not' do
+ subject.heartbeat(iteration: 1)
+ expect(subject.last_heartbeat_throttled).to be(false)
+ subject.heartbeat(iteration: 2)
+ expect(subject.last_heartbeat_throttled).to be(true)
+
+ # Shutdown to drain remaining threads
+ heartbeat_thread_pool.shutdown
end
end
@@ -45,7 +121,7 @@
describe '#async?' do
subject { context.async? }
- let(:context) { described_class.new(client, metadata) }
+ let(:context) { described_class.new(connection, metadata, nil, nil) }
context 'when context is sync' do
it { is_expected.to eq(false) }
@@ -107,4 +183,10 @@
expect(subject.headers).to eq('Foo' => 'Bar')
end
end
+
+ describe '#name' do
+ it 'returns the class name of the activity' do
+ expect(subject.name).to eq('TestActivity')
+ end
+ end
end
diff --git a/spec/unit/lib/temporal/activity/poller_spec.rb b/spec/unit/lib/temporal/activity/poller_spec.rb
index 04de13ed..3e5d24c7 100644
--- a/spec/unit/lib/temporal/activity/poller_spec.rb
+++ b/spec/unit/lib/temporal/activity/poller_spec.rb
@@ -1,6 +1,7 @@
require 'temporal/activity/poller'
-require 'temporal/middleware/entry'
require 'temporal/configuration'
+require 'temporal/metric_keys'
+require 'temporal/middleware/entry'
describe Temporal::Activity::Poller do
let(:connection) { instance_double('Temporal::Connection::GRPC', cancel_polling_request: nil) }
@@ -10,53 +11,81 @@
let(:thread_pool) do
instance_double(Temporal::ThreadPool, wait_for_available_threads: nil, shutdown: nil)
end
+ let(:heartbeat_thread_pool) do
+ instance_double(Temporal::ScheduledThreadPool, shutdown: nil)
+ end
let(:config) { Temporal::Configuration.new }
let(:middleware_chain) { instance_double(Temporal::Middleware::Chain) }
let(:middleware) { [] }
+ let(:busy_wait_delay) {0.01}
subject { described_class.new(namespace, task_queue, lookup, config, middleware) }
before do
allow(Temporal::Connection).to receive(:generate).and_return(connection)
allow(Temporal::ThreadPool).to receive(:new).and_return(thread_pool)
+ allow(Temporal::ScheduledThreadPool).to receive(:new).and_return(heartbeat_thread_pool)
allow(Temporal::Middleware::Chain).to receive(:new).and_return(middleware_chain)
allow(Temporal.metrics).to receive(:timing)
+ allow(Temporal.metrics).to receive(:increment)
end
- describe '#start' do
- it 'measures time between polls' do
- allow(subject).to receive(:shutting_down?).and_return(false, false, true)
- allow(connection).to receive(:poll_activity_task_queue).and_return(nil)
+ # poller will receive task times times, and nil thereafter.
+ # poller will be shut down after that
+ def poll(task, times: 1)
+ polled_times = 0
+ allow(connection).to receive(:poll_activity_task_queue) do
+ polled_times += 1
+ if polled_times <= times
+ task
+ else
+ nil
+ end
+ end
- subject.start
+ subject.start
- # stop poller before inspecting
- subject.stop_polling; subject.wait
+ while polled_times < times
+ sleep(busy_wait_delay)
+ end
+ # stop poller before inspecting
+ subject.stop_polling; subject.wait
+ polled_times
+ end
- expect(connection)
- .to have_received(:poll_activity_task_queue)
- .with(namespace: namespace, task_queue: task_queue)
- .twice
+ describe '#start' do
+ it 'measures time between polls' do
+ # if it doesn't poll, this test will loop forever
+ times = poll(nil, times: 2)
+ expect(times).to be >= 2
end
it 'reports time since last poll' do
- allow(subject).to receive(:shutting_down?).and_return(false, false, true)
- allow(connection).to receive(:poll_activity_task_queue).and_return(nil)
+ poll(nil, times: 2)
- subject.start
+ expect(Temporal.metrics)
+ .to have_received(:timing)
+ .with(
+ Temporal::MetricKeys::ACTIVITY_POLLER_TIME_SINCE_LAST_POLL,
+ an_instance_of(Integer),
+ namespace: namespace,
+ task_queue: task_queue
+ )
+ .at_least(:twice)
+ end
- # stop poller before inspecting
- subject.stop_polling; subject.wait
+ it 'reports polling completed with received_task false' do
+ poll(nil, times: 2)
expect(Temporal.metrics)
- .to have_received(:timing)
+ .to have_received(:increment)
.with(
- 'activity_poller.time_since_last_poll',
- an_instance_of(Fixnum),
+ Temporal::MetricKeys::ACTIVITY_POLLER_POLL_COMPLETED,
+ received_task: 'false',
namespace: namespace,
task_queue: task_queue
)
- .twice
+ .at_least(:twice)
end
context 'when an activity task is received' do
@@ -64,36 +93,43 @@
let(:task) { Fabricate(:api_activity_task) }
before do
- allow(subject).to receive(:shutting_down?).and_return(false, true)
- allow(connection).to receive(:poll_activity_task_queue).and_return(task)
allow(Temporal::Activity::TaskProcessor).to receive(:new).and_return(task_processor)
allow(thread_pool).to receive(:schedule).and_yield
end
it 'schedules task processing using a ThreadPool' do
- subject.start
-
- # stop poller before inspecting
- subject.stop_polling; subject.wait
+ poll(task)
expect(thread_pool).to have_received(:schedule)
end
it 'uses TaskProcessor to process tasks' do
- subject.start
-
- # stop poller before inspecting
- subject.stop_polling; subject.wait
+ poll(task)
expect(Temporal::Activity::TaskProcessor)
.to have_received(:new)
- .with(task, namespace, lookup, middleware_chain, config)
+ .with(task, task_queue, namespace, lookup, middleware_chain, config, heartbeat_thread_pool)
expect(task_processor).to have_received(:process)
end
+ it 'reports polling completed with received_task true' do
+ poll(task)
+
+ expect(Temporal.metrics)
+ .to have_received(:increment)
+ .with(
+ Temporal::MetricKeys::ACTIVITY_POLLER_POLL_COMPLETED,
+ received_task: 'true',
+ namespace: namespace,
+ task_queue: task_queue
+ )
+ .once
+ end
+
context 'with middleware configured' do
class TestPollerMiddleware
def initialize(_); end
+
def call(_); end
end
@@ -102,37 +138,133 @@ def call(_); end
let(:entry_2) { Temporal::Middleware::Entry.new(TestPollerMiddleware, '2') }
it 'initializes middleware chain and passes it down to TaskProcessor' do
- subject.start
-
- # stop poller before inspecting
- subject.stop_polling; subject.wait
+ poll(task)
expect(Temporal::Middleware::Chain).to have_received(:new).with(middleware)
expect(Temporal::Activity::TaskProcessor)
.to have_received(:new)
- .with(task, namespace, lookup, middleware_chain, config)
+ .with(task, task_queue, namespace, lookup, middleware_chain, config, heartbeat_thread_pool)
end
end
end
context 'when connection is unable to poll' do
before do
- allow(subject).to receive(:shutting_down?).and_return(false, true)
- allow(connection).to receive(:poll_activity_task_queue).and_raise(StandardError)
+ allow(subject).to receive(:sleep).and_return(nil)
end
it 'logs' do
allow(Temporal.logger).to receive(:error)
+ polled = false
+ allow(connection).to receive(:poll_activity_task_queue) do
+ if !polled
+ polled = true
+ raise StandardError
+ end
+ end
+
subject.start
+ while !polled
+ sleep(busy_wait_delay)
+ end
# stop poller before inspecting
subject.stop_polling; subject.wait
expect(Temporal.logger)
.to have_received(:error)
- .with('Unable to poll activity task queue', { namespace: 'test-namespace', task_queue: 'test-task-queue', error: '#'})
+ .with('Unable to poll activity task queue', { namespace: 'test-namespace', task_queue: 'test-task-queue', error: '#' })
+ end
+
+ it 'does not sleep' do
+ polled = false
+ allow(connection).to receive(:poll_activity_task_queue) do
+ if !polled
+ polled = true
+ raise StandardError
+ end
+ end
+
+ subject.start
+ while !polled
+ sleep(busy_wait_delay)
+ end
+
+ # stop poller before inspecting
+ subject.stop_polling; subject.wait
+
+ expect(subject).to have_received(:sleep).with(0).once
+ end
+ end
+ end
+
+ context 'when max_tasks_per_second is set' do
+ subject do
+ described_class.new(
+ namespace,
+ task_queue,
+ lookup,
+ config,
+ middleware,
+ {
+ max_tasks_per_second: 32
+ }
+ )
+ end
+
+ it 'sends PollActivityTaskQueue requests with the configured task rate-limit' do
+ times = poll(nil, times: 2)
+ expect(times).to be >= 2
+
+ expect(connection).to have_received(:poll_activity_task_queue)
+ .with(
+ namespace: namespace,
+ task_queue: task_queue,
+ max_tasks_per_second: 32
+ )
+ .at_least(2)
+ .times
+ end
+ end
+
+
+ context 'when connection is unable to poll and poll_retry_seconds is set' do
+ subject do
+ described_class.new(
+ namespace,
+ task_queue,
+ lookup,
+ config,
+ middleware,
+ {
+ poll_retry_seconds: 5
+ }
+ )
+ end
+
+ before do
+ allow(subject).to receive(:sleep).and_return(nil)
+ end
+
+ it 'sleeps' do
+ polled = false
+ allow(connection).to receive(:poll_activity_task_queue) do
+ if !polled
+ polled = true
+ raise StandardError
+ end
+ end
+
+ subject.start
+ while !polled
+ sleep(busy_wait_delay)
end
+
+ # stop poller before inspecting
+ subject.stop_polling; subject.wait
+
+ expect(subject).to have_received(:sleep).with(5).once
end
end
diff --git a/spec/unit/lib/temporal/activity/task_processor_spec.rb b/spec/unit/lib/temporal/activity/task_processor_spec.rb
index 91e1eccf..6999ba60 100644
--- a/spec/unit/lib/temporal/activity/task_processor_spec.rb
+++ b/spec/unit/lib/temporal/activity/task_processor_spec.rb
@@ -1,28 +1,37 @@
require 'temporal/activity/task_processor'
-require 'temporal/middleware/chain'
require 'temporal/configuration'
+require 'temporal/metric_keys'
+require 'temporal/middleware/chain'
+require 'temporal/scheduled_thread_pool'
describe Temporal::Activity::TaskProcessor do
- subject { described_class.new(task, namespace, lookup, middleware_chain, config) }
+ subject { described_class.new(task, task_queue, namespace, lookup, middleware_chain, config, heartbeat_thread_pool) }
let(:namespace) { 'test-namespace' }
+ let(:task_queue) { 'test-queue' }
let(:lookup) { instance_double('Temporal::ExecutableLookup', find: nil) }
let(:task) do
Fabricate(
:api_activity_task,
activity_name: activity_name,
- input: Temporal.configuration.converter.to_payloads(input)
+ input: config.converter.to_payloads(input)
)
end
- let(:metadata) { Temporal::Metadata.generate(Temporal::Metadata::ACTIVITY_TYPE, task) }
+ let(:metadata) { Temporal::Metadata.generate_activity_metadata(task, namespace, config.converter) }
+ let(:workflow_name) { task.workflow_type.name }
let(:activity_name) { 'TestActivity' }
let(:connection) { instance_double('Temporal::Connection::GRPC') }
let(:middleware_chain) { Temporal::Middleware::Chain.new }
let(:config) { Temporal::Configuration.new }
- let(:input) { ['arg1', 'arg2'] }
+ let(:heartbeat_thread_pool) { Temporal::ScheduledThreadPool.new(2, config, {}) }
+ let(:input) { %w[arg1 arg2] }
describe '#process' do
- let(:context) { instance_double('Temporal::Activity::Context', async?: false) }
+ let(:heartbeat_check_scheduled) { nil }
+ let(:context) do
+ instance_double('Temporal::Activity::Context', async?: false,
+ heartbeat_check_scheduled: heartbeat_check_scheduled)
+ end
before do
allow(Temporal::Connection)
@@ -30,10 +39,11 @@
.with(config.for_connection)
.and_return(connection)
allow(Temporal::Metadata)
- .to receive(:generate)
- .with(Temporal::Metadata::ACTIVITY_TYPE, task, namespace)
+ .to receive(:generate_activity_metadata)
+ .with(task, namespace, config.converter)
.and_return(metadata)
- allow(Temporal::Activity::Context).to receive(:new).with(connection, metadata).and_return(context)
+ allow(Temporal::Activity::Context).to receive(:new).with(connection, metadata, config,
+ heartbeat_thread_pool).and_return(context)
allow(connection).to receive(:respond_activity_task_completed)
allow(connection).to receive(:respond_activity_task_failed)
@@ -53,6 +63,7 @@
expect(connection)
.to have_received(:respond_activity_task_failed)
.with(
+ namespace: namespace,
task_token: task.task_token,
exception: an_instance_of(Temporal::ActivityNotRegistered)
)
@@ -70,7 +81,7 @@
reported_error = nil
reported_metadata = nil
- Temporal.configuration.on_error do |error, metadata: nil|
+ config.on_error do |error, metadata: nil|
reported_error = error
reported_metadata = metadata.to_h
end
@@ -109,7 +120,18 @@
expect(connection)
.to have_received(:respond_activity_task_completed)
- .with(task_token: task.task_token, result: 'result')
+ .with(namespace: namespace, task_token: task.task_token, result: 'result')
+ end
+
+ context 'when there is an outstanding scheduled heartbeat' do
+ let(:heartbeat_check_scheduled) do
+ Temporal::ScheduledThreadPool::ScheduledItem.new(id: :foo, canceled: false)
+ end
+ it 'it gets canceled' do
+ subject.process
+
+ expect(heartbeat_check_scheduled.canceled).to eq(true)
+ end
end
it 'ignores connection exception' do
@@ -125,7 +147,15 @@
expect(Temporal.metrics)
.to have_received(:timing)
- .with('activity_task.queue_time', an_instance_of(Integer), activity: activity_name)
+ .with(
+ Temporal::MetricKeys::ACTIVITY_TASK_QUEUE_TIME,
+ an_instance_of(Integer),
+ hash_including({
+ activity: activity_name,
+ namespace: namespace,
+ workflow: workflow_name
+ })
+ )
end
it 'sends latency metric' do
@@ -133,7 +163,14 @@
expect(Temporal.metrics)
.to have_received(:timing)
- .with('activity_task.latency', an_instance_of(Integer), activity: activity_name)
+ .with(
+ Temporal::MetricKeys::ACTIVITY_TASK_LATENCY,
+ an_instance_of(Integer),
+ activity: activity_name,
+ namespace: namespace,
+ task_queue: task_queue,
+ workflow: workflow_name
+ )
end
context 'with async activity' do
@@ -170,6 +207,7 @@
expect(connection)
.to have_received(:respond_activity_task_failed)
.with(
+ namespace: namespace,
task_token: task.task_token,
exception: exception
)
@@ -187,7 +225,7 @@
reported_error = nil
reported_metadata = nil
- Temporal.configuration.on_error do |error, metadata: nil|
+ config.on_error do |error, metadata: nil|
reported_error = error
reported_metadata = metadata
end
@@ -203,7 +241,15 @@
expect(Temporal.metrics)
.to have_received(:timing)
- .with('activity_task.queue_time', an_instance_of(Integer), activity: activity_name)
+ .with(
+ Temporal::MetricKeys::ACTIVITY_TASK_QUEUE_TIME,
+ an_instance_of(Integer),
+ hash_including({
+ activity: activity_name,
+ namespace: namespace,
+ workflow: workflow_name
+ })
+ )
end
it 'sends latency metric' do
@@ -211,7 +257,14 @@
expect(Temporal.metrics)
.to have_received(:timing)
- .with('activity_task.latency', an_instance_of(Integer), activity: activity_name)
+ .with(
+ Temporal::MetricKeys::ACTIVITY_TASK_LATENCY,
+ an_instance_of(Integer),
+ activity: activity_name,
+ namespace: namespace,
+ task_queue: task_queue,
+ workflow: workflow_name
+ )
end
context 'with ScriptError exception' do
@@ -223,6 +276,7 @@
expect(connection)
.to have_received(:respond_activity_task_failed)
.with(
+ namespace: namespace,
task_token: task.task_token,
exception: exception
)
@@ -247,7 +301,11 @@
expect(connection)
.to have_received(:respond_activity_task_failed)
- .with(task_token: task.task_token, exception: exception)
+ .with(
+ namespace: namespace,
+ task_token: task.task_token,
+ exception: exception
+ )
end
end
end
diff --git a/spec/unit/lib/temporal/activity_spec.rb b/spec/unit/lib/temporal/activity_spec.rb
index 47a0e8b0..f7dc5662 100644
--- a/spec/unit/lib/temporal/activity_spec.rb
+++ b/spec/unit/lib/temporal/activity_spec.rb
@@ -4,15 +4,28 @@
describe Temporal::Activity do
it_behaves_like 'an executable'
+ class ArgsActivity < Temporal::Activity
+ def execute(a)
+ 'args result'
+ end
+ end
+
+ class KwargsActivity < Temporal::Activity
+ def execute(a, b:, c:)
+ 'kwargs result'
+ end
+ end
+
subject { described_class.new(context) }
let(:context) { instance_double('Temporal::Activity::Context') }
describe '.execute_in_context' do
+ subject { ArgsActivity.new(context) }
+
let(:input) { ['test'] }
before do
allow(described_class).to receive(:new).and_return(subject)
- allow(subject).to receive(:execute).and_return('result')
end
it 'passes the context' do
@@ -22,13 +35,41 @@
end
it 'calls #execute' do
- described_class.execute_in_context(context, input)
+ expect(subject).to receive(:execute).with(*input)
- expect(subject).to have_received(:execute).with(*input)
+ described_class.execute_in_context(context, input)
end
it 'returns #execute result' do
- expect(described_class.execute_in_context(context, input)).to eq('result')
+ expect(described_class.execute_in_context(context, input)).to eq('args result')
+ end
+
+ context 'when using keyword arguments' do
+ subject { KwargsActivity.new(context) }
+
+ let(:input) { ['test', { b: 'b', c: 'c' }] }
+
+ it 'passes the context' do
+ described_class.execute_in_context(context, input)
+
+ expect(described_class).to have_received(:new).with(context)
+ end
+
+ it 'calls #execute' do
+ expect(subject).to receive(:execute).with('test', b: 'b', c: 'c')
+
+ described_class.execute_in_context(context, input)
+ end
+
+ it 'does not raise an ArgumentError' do
+ expect {
+ described_class.execute_in_context(context, input)
+ }.not_to raise_error
+ end
+
+ it 'returns #execute result' do
+ expect(described_class.execute_in_context(context, input)).to eq('kwargs result')
+ end
end
end
diff --git a/spec/unit/lib/temporal/client_spec.rb b/spec/unit/lib/temporal/client_spec.rb
index 92ec094b..31dc4a78 100644
--- a/spec/unit/lib/temporal/client_spec.rb
+++ b/spec/unit/lib/temporal/client_spec.rb
@@ -4,11 +4,12 @@
require 'temporal/workflow'
require 'temporal/workflow/history'
require 'temporal/connection/grpc'
+require 'temporal/reset_reapply_type'
describe Temporal::Client do
subject { described_class.new(config) }
- let(:config) { Temporal::Configuration.new }
+ let(:config) { Temporal::Configuration.new.tap { |c| c.namespace = namespace } }
let(:connection) { instance_double(Temporal::Connection::GRPC) }
let(:namespace) { 'default-test-namespace' }
let(:workflow_id) { SecureRandom.uuid }
@@ -34,11 +35,41 @@ class TestStartWorkflow < Temporal::Workflow
describe '#start_workflow' do
let(:temporal_response) do
- Temporal::Api::WorkflowService::V1::StartWorkflowExecutionResponse.new(run_id: 'xxx')
+ Temporalio::Api::WorkflowService::V1::StartWorkflowExecutionResponse.new(run_id: 'xxx')
end
before { allow(connection).to receive(:start_workflow_execution).and_return(temporal_response) }
+ context 'with header propagator' do
+ class TestHeaderPropagator
+ def inject!(header)
+ header['test'] = 'asdf'
+ end
+ end
+
+ it 'updates the header' do
+ config.add_header_propagator(TestHeaderPropagator)
+ subject.start_workflow(TestStartWorkflow, 42)
+ expect(connection)
+ .to have_received(:start_workflow_execution)
+ .with(
+ namespace: 'default-test-namespace',
+ workflow_id: an_instance_of(String),
+ workflow_name: 'TestStartWorkflow',
+ task_queue: 'default-test-task-queue',
+ input: [42],
+ task_timeout: config.timeouts[:task],
+ run_timeout: config.timeouts[:run],
+ execution_timeout: config.timeouts[:execution],
+ workflow_id_reuse_policy: nil,
+ headers: { 'test' => 'asdf' },
+ memo: {},
+ search_attributes: {},
+ start_delay: 0
+ )
+ end
+ end
+
context 'using a workflow class' do
it 'returns run_id' do
result = subject.start_workflow(TestStartWorkflow, 42)
@@ -57,11 +88,14 @@ class TestStartWorkflow < Temporal::Workflow
workflow_name: 'TestStartWorkflow',
task_queue: 'default-test-task-queue',
input: [42],
- task_timeout: Temporal.configuration.timeouts[:task],
- run_timeout: Temporal.configuration.timeouts[:run],
- execution_timeout: Temporal.configuration.timeouts[:execution],
+ task_timeout: config.timeouts[:task],
+ run_timeout: config.timeouts[:run],
+ execution_timeout: config.timeouts[:execution],
workflow_id_reuse_policy: nil,
- headers: {}
+ headers: {},
+ memo: {},
+ search_attributes: {},
+ start_delay: 0
)
end
@@ -73,7 +107,11 @@ class TestStartWorkflow < Temporal::Workflow
name: 'test-workflow',
namespace: 'test-namespace',
task_queue: 'test-task-queue',
- headers: { 'Foo' => 'Bar' }
+ headers: { 'Foo' => 'Bar' },
+ workflow_id_reuse_policy: :reject,
+ memo: { 'MemoKey1' => 'MemoValue1' },
+ search_attributes: { 'SearchAttribute1' => 256 },
+ start_delay: 10
}
)
@@ -85,11 +123,14 @@ class TestStartWorkflow < Temporal::Workflow
workflow_name: 'test-workflow',
task_queue: 'test-task-queue',
input: [42],
- task_timeout: Temporal.configuration.timeouts[:task],
- run_timeout: Temporal.configuration.timeouts[:run],
- execution_timeout: Temporal.configuration.timeouts[:execution],
- workflow_id_reuse_policy: nil,
- headers: { 'Foo' => 'Bar' }
+ task_timeout: config.timeouts[:task],
+ run_timeout: config.timeouts[:run],
+ execution_timeout: config.timeouts[:execution],
+ workflow_id_reuse_policy: :reject,
+ headers: { 'Foo' => 'Bar' },
+ memo: { 'MemoKey1' => 'MemoValue1' },
+ search_attributes: { 'SearchAttribute1' => 256 },
+ start_delay: 10
)
end
@@ -110,11 +151,14 @@ class TestStartWorkflow < Temporal::Workflow
workflow_name: 'test-workflow',
task_queue: 'default-test-task-queue',
input: [42, { arg_1: 1, arg_2: 2 }],
- task_timeout: Temporal.configuration.timeouts[:task],
- run_timeout: Temporal.configuration.timeouts[:run],
- execution_timeout: Temporal.configuration.timeouts[:execution],
+ task_timeout: config.timeouts[:task],
+ run_timeout: config.timeouts[:run],
+ execution_timeout: config.timeouts[:execution],
workflow_id_reuse_policy: nil,
- headers: {}
+ headers: {},
+ memo: {},
+ search_attributes: {},
+ start_delay: 0
)
end
@@ -129,11 +173,14 @@ class TestStartWorkflow < Temporal::Workflow
workflow_name: 'TestStartWorkflow',
task_queue: 'default-test-task-queue',
input: [42],
- task_timeout: Temporal.configuration.timeouts[:task],
- run_timeout: Temporal.configuration.timeouts[:run],
- execution_timeout: Temporal.configuration.timeouts[:execution],
+ task_timeout: config.timeouts[:task],
+ run_timeout: config.timeouts[:run],
+ execution_timeout: config.timeouts[:execution],
workflow_id_reuse_policy: nil,
- headers: {}
+ headers: {},
+ memo: {},
+ search_attributes: {},
+ start_delay: 0
)
end
@@ -150,11 +197,14 @@ class TestStartWorkflow < Temporal::Workflow
workflow_name: 'TestStartWorkflow',
task_queue: 'default-test-task-queue',
input: [42],
- task_timeout: Temporal.configuration.timeouts[:task],
- run_timeout: Temporal.configuration.timeouts[:run],
- execution_timeout: Temporal.configuration.timeouts[:execution],
+ task_timeout: config.timeouts[:task],
+ run_timeout: config.timeouts[:run],
+ execution_timeout: config.timeouts[:execution],
workflow_id_reuse_policy: :allow,
- headers: {}
+ headers: {},
+ memo: {},
+ search_attributes: {},
+ start_delay: 0
)
end
end
@@ -175,19 +225,102 @@ class TestStartWorkflow < Temporal::Workflow
workflow_name: 'test-workflow',
task_queue: 'test-task-queue',
input: [42],
- task_timeout: Temporal.configuration.timeouts[:task],
- run_timeout: Temporal.configuration.timeouts[:run],
- execution_timeout: Temporal.configuration.timeouts[:execution],
+ task_timeout: config.timeouts[:task],
+ run_timeout: config.timeouts[:run],
+ execution_timeout: config.timeouts[:execution],
workflow_id_reuse_policy: nil,
- headers: {}
+ headers: {},
+ memo: {},
+ search_attributes: {},
+ start_delay: 0
)
end
end
end
+ describe '#start_workflow with a signal' do
+ let(:temporal_response) do
+ Temporalio::Api::WorkflowService::V1::SignalWithStartWorkflowExecutionResponse.new(run_id: 'xxx')
+ end
+
+ before { allow(connection).to receive(:signal_with_start_workflow_execution).and_return(temporal_response) }
+
+ def expect_signal_with_start(expected_arguments, expected_signal_argument)
+ expect(connection)
+ .to have_received(:signal_with_start_workflow_execution)
+ .with(
+ namespace: 'default-test-namespace',
+ workflow_id: an_instance_of(String),
+ workflow_name: 'TestStartWorkflow',
+ task_queue: 'default-test-task-queue',
+ input: expected_arguments,
+ task_timeout: config.timeouts[:task],
+ run_timeout: config.timeouts[:run],
+ execution_timeout: config.timeouts[:execution],
+ workflow_id_reuse_policy: nil,
+ headers: {},
+ memo: {},
+ search_attributes: {},
+ signal_name: 'the question',
+ signal_input: expected_signal_argument,
+ start_delay: 0
+ )
+ end
+
+ it 'starts a workflow with a signal and no arguments' do
+ subject.start_workflow(
+ TestStartWorkflow,
+ options: { signal_name: 'the question' }
+ )
+
+ expect_signal_with_start([], nil)
+ end
+
+ it 'starts a workflow with a signal and one scalar argument' do
+ signal_input = 'what do you get if you multiply six by nine?'
+ subject.start_workflow(
+ TestStartWorkflow,
+ 42,
+ options: {
+ signal_name: 'the question',
+ signal_input: signal_input,
+ }
+ )
+
+ expect_signal_with_start([42], signal_input)
+ end
+
+ it 'starts a workflow with a signal and multiple arguments and signal_inputs' do
+ signal_input = ['what do you get', 'if you multiply six by nine?']
+ subject.start_workflow(
+ TestStartWorkflow,
+ 42,
+ 43,
+ options: {
+ signal_name: 'the question',
+ # signals can't have multiple scalar args, but you can pass an array
+ signal_input: signal_input
+ }
+ )
+
+ expect_signal_with_start([42, 43], signal_input)
+ end
+
+ it 'raises when signal_input is given but signal_name is not' do
+ expect do
+ subject.start_workflow(
+ TestStartWorkflow,
+ [42, 54],
+ [43, 55],
+ options: { signal_input: 'what do you get if you multiply six by nine?', }
+ )
+ end.to raise_error(ArgumentError)
+ end
+ end
+
describe '#schedule_workflow' do
let(:temporal_response) do
- Temporal::Api::WorkflowService::V1::StartWorkflowExecutionResponse.new(run_id: 'xxx')
+ Temporalio::Api::WorkflowService::V1::StartWorkflowExecutionResponse.new(run_id: 'xxx')
end
before { allow(connection).to receive(:start_workflow_execution).and_return(temporal_response) }
@@ -204,11 +337,13 @@ class TestStartWorkflow < Temporal::Workflow
task_queue: 'default-test-task-queue',
cron_schedule: '* * * * *',
input: [42],
- task_timeout: Temporal.configuration.timeouts[:task],
- run_timeout: Temporal.configuration.timeouts[:run],
- execution_timeout: Temporal.configuration.timeouts[:execution],
+ task_timeout: config.timeouts[:task],
+ run_timeout: config.timeouts[:run],
+ execution_timeout: config.timeouts[:execution],
workflow_id_reuse_policy: nil,
- headers: {}
+ memo: {},
+ search_attributes: {},
+ headers: {},
)
end
end
@@ -221,7 +356,7 @@ class TestStartWorkflow < Temporal::Workflow
expect(connection)
.to have_received(:register_namespace)
- .with(name: 'new-namespace', description: nil)
+ .with(name: 'new-namespace', description: nil, is_global: false, data: nil, retention_period: 10)
end
it 'registers namespace with the specified name and description' do
@@ -229,10 +364,21 @@ class TestStartWorkflow < Temporal::Workflow
expect(connection)
.to have_received(:register_namespace)
- .with(name: 'new-namespace', description: 'namespace description')
+ .with(name: 'new-namespace', description: 'namespace description', is_global: false, data: nil, retention_period: 10)
end
end
+ describe '#describe_namespace' do
+ before { allow(connection).to receive(:describe_namespace).and_return(Temporalio::Api::WorkflowService::V1::DescribeNamespaceResponse.new) }
+
+ it 'passes the namespace to the connection' do
+ result = subject.describe_namespace('new-namespace')
+
+ expect(connection)
+ .to have_received(:describe_namespace)
+ .with(name: 'new-namespace')
+ end
+ end
describe '#signal_workflow' do
before { allow(connection).to receive(:signal_workflow_execution).and_return(nil) }
@@ -244,7 +390,7 @@ class TestStartWorkflow < Temporal::Workflow
.to have_received(:signal_workflow_execution)
.with(
namespace: 'default-test-namespace',
- signal: 'signal',
+ signal: 'signal',
workflow_id: 'workflow_id',
run_id: 'run_id',
input: nil,
@@ -258,7 +404,7 @@ class TestStartWorkflow < Temporal::Workflow
.to have_received(:signal_workflow_execution)
.with(
namespace: 'default-test-namespace',
- signal: 'signal',
+ signal: 'signal',
workflow_id: 'workflow_id',
run_id: 'run_id',
input: 'input',
@@ -272,7 +418,7 @@ class TestStartWorkflow < Temporal::Workflow
.to have_received(:signal_workflow_execution)
.with(
namespace: 'other-test-namespace',
- signal: 'signal',
+ signal: 'signal',
workflow_id: 'workflow_id',
run_id: 'run_id',
input: nil,
@@ -312,7 +458,7 @@ class NamespacedWorkflow < Temporal::Workflow
)
end
- it 'can override the namespace' do
+ it 'can override the namespace' do
completed_event = Fabricate(:workflow_completed_event, result: nil)
response = Fabricate(:workflow_execution_history, events: [completed_event])
@@ -343,9 +489,9 @@ class NamespacedWorkflow < Temporal::Workflow
['string', 'a result'],
].each do |(type, expected_result)|
it "completes and returns a #{type}" do
- payload = Temporal::Api::Common::V1::Payloads.new(
+ payload = Temporalio::Api::Common::V1::Payloads.new(
payloads: [
- Temporal.configuration.converter.to_payload(expected_result)
+ config.converter.to_payload(expected_result)
],
)
completed_event = Fabricate(:workflow_completed_event, result: payload)
@@ -397,7 +543,7 @@ class NamespacedWorkflow < Temporal::Workflow
end.to raise_error(Temporal::WorkflowCanceled)
end
- it 'raises TimeoutError when the server times out' do
+ it 'raises TimeoutError when the server times out' do
response = Fabricate(:workflow_execution_history, events: [])
expect(connection)
.to receive(:get_workflow_execution_history)
@@ -423,7 +569,7 @@ class NamespacedWorkflow < Temporal::Workflow
describe '#reset_workflow' do
let(:temporal_response) do
- Temporal::Api::WorkflowService::V1::ResetWorkflowExecutionResponse.new(run_id: 'xxx')
+ Temporalio::Api::WorkflowService::V1::ResetWorkflowExecutionResponse.new(run_id: 'xxx')
end
let(:history) do
Temporal::Workflow::History.new([
@@ -474,7 +620,32 @@ class NamespacedWorkflow < Temporal::Workflow
workflow_id: '123',
run_id: '1234',
reason: 'Test reset',
- workflow_task_event_id: workflow_task_id
+ workflow_task_event_id: workflow_task_id,
+ # The request ID will be a random UUID:
+ request_id: anything,
+ reset_reapply_type: :signal
+ )
+ end
+
+ it 'passes through request_id and reset_reapply_type' do
+ subject.reset_workflow(
+ 'default-test-namespace',
+ '123',
+ '1234',
+ workflow_task_id: workflow_task_id,
+ reason: 'Test reset',
+ request_id: 'foo',
+ reset_reapply_type: Temporal::ResetReapplyType::SIGNAL
+ )
+
+ expect(connection).to have_received(:reset_workflow_execution).with(
+ namespace: 'default-test-namespace',
+ workflow_id: '123',
+ run_id: '1234',
+ reason: 'Test reset',
+ workflow_task_event_id: workflow_task_id,
+ request_id: 'foo',
+ reset_reapply_type: :signal
)
end
@@ -499,7 +670,10 @@ class NamespacedWorkflow < Temporal::Workflow
workflow_id: workflow_id,
run_id: run_id,
reason: 'manual reset',
- workflow_task_event_id: 16
+ workflow_task_event_id: 16,
+ # The request ID will be a random UUID:
+ request_id: instance_of(String),
+ reset_reapply_type: :signal
)
end
end
@@ -528,7 +702,10 @@ class NamespacedWorkflow < Temporal::Workflow
workflow_id: workflow_id,
run_id: run_id,
reason: 'manual reset',
- workflow_task_event_id: 16
+ workflow_task_event_id: 16,
+ # The request ID will be a random UUID:
+ request_id: instance_of(String),
+ reset_reapply_type: :signal
)
end
end
@@ -542,7 +719,10 @@ class NamespacedWorkflow < Temporal::Workflow
workflow_id: workflow_id,
run_id: run_id,
reason: 'manual reset',
- workflow_task_event_id: 4
+ workflow_task_event_id: 4,
+ # The request ID will be a random UUID:
+ request_id: instance_of(String),
+ reset_reapply_type: :signal
)
end
end
@@ -557,7 +737,10 @@ class NamespacedWorkflow < Temporal::Workflow
workflow_id: workflow_id,
run_id: run_id,
reason: 'manual reset',
- workflow_task_event_id: 10
+ workflow_task_event_id: 10,
+ # The request ID will be a random UUID:
+ request_id: instance_of(String),
+ reset_reapply_type: :signal
)
end
end
@@ -574,7 +757,7 @@ class NamespacedWorkflow < Temporal::Workflow
describe '#terminate_workflow' do
let(:temporal_response) do
- Temporal::Api::WorkflowService::V1::TerminateWorkflowExecutionResponse.new
+ Temporalio::Api::WorkflowService::V1::TerminateWorkflowExecutionResponse.new
end
before { allow(connection).to receive(:terminate_workflow_execution).and_return(temporal_response) }
@@ -585,7 +768,7 @@ class NamespacedWorkflow < Temporal::Workflow
expect(connection)
.to have_received(:terminate_workflow_execution)
.with(
- namespace: 'default-namespace',
+ namespace: 'default-test-namespace',
workflow_id: 'my-workflow',
reason: 'just stop it',
details: nil,
@@ -596,11 +779,11 @@ class NamespacedWorkflow < Temporal::Workflow
describe '#fetch_workflow_execution_info' do
let(:response) do
- Temporal::Api::WorkflowService::V1::DescribeWorkflowExecutionResponse.new(
+ Temporalio::Api::WorkflowService::V1::DescribeWorkflowExecutionResponse.new(
workflow_execution_info: api_info
)
end
- let(:api_info) { Fabricate(:api_workflow_execution_info) }
+ let(:api_info) { Fabricate(:api_workflow_execution_info, workflow: 'TestWorkflow', workflow_id: '') }
before { allow(connection).to receive(:describe_workflow_execution).and_return(response) }
@@ -679,4 +862,303 @@ class NamespacedWorkflow < Temporal::Workflow
end
end
end
+
+ describe '#add_custom_search_attributes' do
+ before { allow(connection).to receive(:add_custom_search_attributes) }
+
+ let(:attributes) { { SomeTextField: :text, SomeIntField: :int } }
+
+ it 'passes through to connection' do
+ subject.add_custom_search_attributes(attributes)
+
+ expect(connection)
+ .to have_received(:add_custom_search_attributes)
+ .with(attributes, namespace)
+ end
+ end
+
+ describe '#list_custom_search_attributes' do
+ let(:attributes) { { 'SomeIntField' => :int, 'SomeBoolField' => :bool } }
+
+ before { allow(connection).to receive(:list_custom_search_attributes).and_return(attributes) }
+
+ it 'passes through to connection' do
+ response = subject.list_custom_search_attributes
+
+ expect(response).to eq(attributes)
+
+ expect(connection)
+ .to have_received(:list_custom_search_attributes)
+ end
+ end
+
+ describe '#remove_custom_search_attributes' do
+ before { allow(connection).to receive(:remove_custom_search_attributes) }
+
+ it 'passes through to connection' do
+ subject.remove_custom_search_attributes(:SomeTextField, :SomeIntField)
+
+ expect(connection)
+ .to have_received(:remove_custom_search_attributes)
+ .with(%i[SomeTextField SomeIntField], namespace)
+ end
+ end
+
+ describe '#get_workflow_history' do
+ it 'gets full history with pagination' do
+ completed_event = Fabricate(:workflow_completed_event, result: nil)
+ response_1 = Fabricate(:workflow_execution_history, events: [completed_event], next_page_token: 'a')
+ response_2 = Fabricate(:workflow_execution_history, events: [completed_event], next_page_token: '')
+
+ allow(connection)
+ .to receive(:get_workflow_execution_history)
+ .and_return(response_1, response_2)
+
+ subject.get_workflow_history(namespace: namespace, workflow_id: workflow_id, run_id: run_id)
+
+ expect(connection)
+ .to have_received(:get_workflow_execution_history)
+ .with(namespace: namespace, workflow_id: workflow_id, run_id: run_id, next_page_token: nil)
+ .ordered
+
+ expect(connection)
+ .to have_received(:get_workflow_execution_history)
+ .with(namespace: namespace, workflow_id: workflow_id, run_id: run_id, next_page_token: 'a')
+ .ordered
+
+ expect(connection).to have_received(:get_workflow_execution_history).exactly(2).times
+ end
+ end
+
+ describe '#list_open_workflow_executions' do
+ let(:from) { Time.now - 600 }
+ let(:now) { Time.now }
+ let(:api_execution_info) do
+ Fabricate(:api_workflow_execution_info, workflow: 'TestWorkflow', workflow_id: '')
+ end
+ let(:response) do
+ Temporalio::Api::WorkflowService::V1::ListOpenWorkflowExecutionsResponse.new(
+ executions: [api_execution_info],
+ next_page_token: ''
+ )
+ end
+
+ before do
+ allow(Time).to receive(:now).and_return(now)
+ allow(connection)
+ .to receive(:list_open_workflow_executions)
+ .and_return(response)
+ end
+
+ it 'returns a list of executions' do
+ executions = subject.list_open_workflow_executions(namespace, from)
+ expect(executions.count).to eq(1)
+ expect(executions.first).to be_an_instance_of(Temporal::Workflow::ExecutionInfo)
+ end
+
+ context 'when history is paginated' do
+ let(:response_1) do
+ Temporalio::Api::WorkflowService::V1::ListOpenWorkflowExecutionsResponse.new(
+ executions: [api_execution_info],
+ next_page_token: 'a'
+ )
+ end
+ let(:response_2) do
+ Temporalio::Api::WorkflowService::V1::ListOpenWorkflowExecutionsResponse.new(
+ executions: [api_execution_info],
+ next_page_token: 'b'
+ )
+ end
+ let(:response_3) do
+ Temporalio::Api::WorkflowService::V1::ListOpenWorkflowExecutionsResponse.new(
+ executions: [api_execution_info],
+ next_page_token: ''
+ )
+ end
+
+ before do
+ allow(connection)
+ .to receive(:list_open_workflow_executions)
+ .and_return(response_1, response_2, response_3)
+ end
+
+ it 'calls the API 3 times' do
+ subject.list_open_workflow_executions(namespace, from).count
+
+ expect(connection).to have_received(:list_open_workflow_executions).exactly(3).times
+
+ expect(connection)
+ .to have_received(:list_open_workflow_executions)
+ .with(namespace: namespace, from: from, to: now, next_page_token: nil, max_page_size: nil)
+ .once
+
+ expect(connection)
+ .to have_received(:list_open_workflow_executions)
+ .with(namespace: namespace, from: from, to: now, next_page_token: 'a', max_page_size: nil)
+ .once
+
+ expect(connection)
+ .to have_received(:list_open_workflow_executions)
+ .with(namespace: namespace, from: from, to: now, next_page_token: 'b', max_page_size: nil)
+ .once
+ end
+
+ it 'returns a list of executions' do
+ executions = subject.list_open_workflow_executions(namespace, from)
+
+ expect(executions.count).to eq(3)
+ executions.each do |execution|
+ expect(execution).to be_an_instance_of(Temporal::Workflow::ExecutionInfo)
+ end
+ end
+
+ it 'returns the next page token and paginates correctly' do
+ executions1 = subject.list_open_workflow_executions(namespace, from, max_page_size: 10)
+ executions1.map do |execution|
+ expect(execution).to be_an_instance_of(Temporal::Workflow::ExecutionInfo)
+ end
+ expect(executions1.next_page_token).to eq('a')
+ expect(connection)
+ .to have_received(:list_open_workflow_executions)
+ .with(namespace: namespace, from: from, to: now, next_page_token: nil, max_page_size: 10)
+ .once
+
+ executions2 = subject.list_open_workflow_executions(namespace, from, next_page_token: executions1.next_page_token, max_page_size: 10)
+ executions2.map do |execution|
+ expect(execution).to be_an_instance_of(Temporal::Workflow::ExecutionInfo)
+ end
+ expect(executions2.next_page_token).to eq('b')
+ expect(connection)
+ .to have_received(:list_open_workflow_executions)
+ .with(namespace: namespace, from: from, to: now, next_page_token: 'a', max_page_size: 10)
+ .once
+
+ executions3 = subject.list_open_workflow_executions(namespace, from, next_page_token: executions2.next_page_token, max_page_size: 10)
+ executions3.map do |execution|
+ expect(execution).to be_an_instance_of(Temporal::Workflow::ExecutionInfo)
+ end
+ expect(executions3.next_page_token).to eq('')
+ expect(connection)
+ .to have_received(:list_open_workflow_executions)
+ .with(namespace: namespace, from: from, to: now, next_page_token: 'a', max_page_size: 10)
+ .once
+ end
+
+ it 'returns the next page and paginates correctly' do
+ executions1 = subject.list_open_workflow_executions(namespace, from, max_page_size: 10)
+ executions1.map do |execution|
+ expect(execution).to be_an_instance_of(Temporal::Workflow::ExecutionInfo)
+ end
+ expect(executions1.next_page_token).to eq('a')
+ expect(connection)
+ .to have_received(:list_open_workflow_executions)
+ .with(namespace: namespace, from: from, to: now, next_page_token: nil, max_page_size: 10)
+ .once
+
+ executions2 = executions1.next_page
+ executions2.map do |execution|
+ expect(execution).to be_an_instance_of(Temporal::Workflow::ExecutionInfo)
+ end
+ expect(executions2.next_page_token).to eq('b')
+ expect(connection)
+ .to have_received(:list_open_workflow_executions)
+ .with(namespace: namespace, from: from, to: now, next_page_token: 'a', max_page_size: 10)
+ .once
+
+ executions3 = executions2.next_page
+ executions3.map do |execution|
+ expect(execution).to be_an_instance_of(Temporal::Workflow::ExecutionInfo)
+ end
+ expect(executions3.next_page_token).to eq('')
+ expect(connection)
+ .to have_received(:list_open_workflow_executions)
+ .with(namespace: namespace, from: from, to: now, next_page_token: 'a', max_page_size: 10)
+ .once
+ end
+ end
+
+ context 'when given unsupported filter' do
+ let(:filter) { { foo: :bar } }
+
+ it 'raises ArgumentError' do
+ expect do
+ subject.list_open_workflow_executions(namespace, from, filter: filter).to_a
+ end.to raise_error(ArgumentError, 'Allowed filters are: [:workflow, :workflow_id]')
+ end
+ end
+
+ context 'when given multiple filters' do
+ let(:filter) { { workflow: 'TestWorkflow', workflow_id: 'xxx' } }
+
+ it 'raises ArgumentError' do
+ expect do
+ subject.list_open_workflow_executions(namespace, from, filter: filter).count
+ end.to raise_error(ArgumentError, 'Only one filter is allowed')
+ end
+ end
+
+ context 'when called without filters' do
+ it 'makes a request' do
+ subject.list_open_workflow_executions(namespace, from).to_a
+
+ expect(connection)
+ .to have_received(:list_open_workflow_executions)
+ .with(namespace: namespace, from: from, to: now, next_page_token: nil, max_page_size: nil)
+ end
+ end
+
+ context 'when called with :to' do
+ it 'makes a request' do
+ subject.list_open_workflow_executions(namespace, from, now - 10).to_a
+
+ expect(connection)
+ .to have_received(:list_open_workflow_executions)
+ .with(namespace: namespace, from: from, to: now - 10, next_page_token: nil, max_page_size: nil)
+ end
+ end
+
+ context 'when called with a :workflow filter' do
+ it 'makes a request' do
+ subject.list_open_workflow_executions(namespace, from, filter: { workflow: 'TestWorkflow' }).to_a
+
+ expect(connection)
+ .to have_received(:list_open_workflow_executions)
+ .with(namespace: namespace, from: from, to: now, next_page_token: nil, workflow: 'TestWorkflow', max_page_size: nil)
+ end
+ end
+
+ context 'when called with a :workflow_id filter' do
+ it 'makes a request' do
+ subject.list_open_workflow_executions(namespace, from, filter: { workflow_id: 'xxx' }).to_a
+
+ expect(connection)
+ .to have_received(:list_open_workflow_executions)
+ .with(namespace: namespace, from: from, to: now, next_page_token: nil, workflow_id: 'xxx', max_page_size: nil)
+ end
+ end
+ end
+
+ describe '#count_workflow_executions' do
+ let(:response) do
+ Temporalio::Api::WorkflowService::V1::CountWorkflowExecutionsResponse.new(
+ count: 5
+ )
+ end
+
+ before do
+ allow(connection)
+ .to receive(:count_workflow_executions)
+ .and_return(response)
+ end
+
+ it 'returns the count' do
+ resp = subject.count_workflow_executions(namespace, query: 'ExecutionStatus="Running"')
+
+ expect(connection)
+ .to have_received(:count_workflow_executions)
+ .with(namespace: namespace, query: 'ExecutionStatus="Running"')
+
+ expect(resp).to eq(5)
+ end
+ end
end
diff --git a/spec/unit/lib/temporal/configuration_spec.rb b/spec/unit/lib/temporal/configuration_spec.rb
new file mode 100644
index 00000000..8ab2e282
--- /dev/null
+++ b/spec/unit/lib/temporal/configuration_spec.rb
@@ -0,0 +1,111 @@
+require 'temporal/configuration'
+
+describe Temporal::Configuration do
+ class TestHeaderPropagator
+ def inject!(_); end
+ end
+
+ describe '#initialize' do
+ it 'initializes proper default workflow timeouts' do
+ timeouts = subject.timeouts
+
+ # By default, we don't ever want to timeout workflows, because workflows "always succeed" and
+ # they may be long-running
+ expect(timeouts[:execution]).to be >= 86_400 * 365 * 10
+ expect(timeouts[:run]).to eq(timeouts[:execution])
+ expect(timeouts[:task]).to eq(10)
+ end
+
+ it 'initializes proper default activity timeouts' do
+ timeouts = subject.timeouts
+
+ # Schedule to start timeouts are dangerous because there is no retry.
+ # https://docs.temporal.io/blog/activity-timeouts/#schedule-to-start-timeout recommends to use them rarely
+ expect(timeouts[:schedule_to_start]).to be(nil)
+ # We keep retrying until the workflow times out, by default
+ expect(timeouts[:schedule_to_close]).to be(nil)
+ # Activity invocations should be short-lived by default so they can be retried relatively quickly
+ expect(timeouts[:start_to_close]).to eq(30)
+ # No heartbeating for a default (short-lived) activity
+ expect(timeouts[:heartbeat]).to be(nil)
+ end
+ end
+
+ describe '#add_header_propagator' do
+ let(:header_propagators) { subject.send(:header_propagators) }
+
+ it 'adds middleware entry to the list of middlewares' do
+ subject.add_header_propagator(TestHeaderPropagator)
+ subject.add_header_propagator(TestHeaderPropagator, 'arg1', 'arg2')
+
+ expect(header_propagators.size).to eq(2)
+
+ expect(header_propagators[0]).to be_an_instance_of(Temporal::Middleware::Entry)
+ expect(header_propagators[0].klass).to eq(TestHeaderPropagator)
+ expect(header_propagators[0].args).to eq([])
+
+ expect(header_propagators[1]).to be_an_instance_of(Temporal::Middleware::Entry)
+ expect(header_propagators[1].klass).to eq(TestHeaderPropagator)
+ expect(header_propagators[1].args).to eq(['arg1', 'arg2'])
+ end
+ end
+
+ describe '#for_connection' do
+ let(:new_identity) { 'new_identity' }
+
+ it 'default identity' do
+ expect(subject.for_connection).to have_attributes(identity: "#{Process.pid}@#{`hostname`}")
+ end
+
+ it 'override identity' do
+ subject.identity = new_identity
+ expect(subject.for_connection).to have_attributes(identity: new_identity)
+ end
+ end
+
+ describe '#converter' do
+ it 'wraps the provided converter and codec' do
+ converter_wrapper = subject.converter
+
+ expect(converter_wrapper).to be_a(Temporal::ConverterWrapper)
+ expect(converter_wrapper.send(:converter)).to eq(described_class::DEFAULT_CONVERTER)
+ expect(converter_wrapper.send(:codec)).to eq(described_class::DEFAULT_PAYLOAD_CODEC)
+ end
+ end
+
+ describe '#converter=' do
+ let(:converter) { instance_double(Temporal::Connection::Converter::Composite) }
+
+ it 'resets the wrapper when converter has changed' do
+ old_converter_wrapper = subject.converter
+
+ expect(old_converter_wrapper).to be_a(Temporal::ConverterWrapper)
+ expect(old_converter_wrapper.send(:converter)).to eq(described_class::DEFAULT_CONVERTER)
+
+ subject.converter = converter
+ new_converter_wrapper = subject.converter
+
+ expect(new_converter_wrapper).to be_a(Temporal::ConverterWrapper)
+ expect(new_converter_wrapper.send(:converter)).to eq(converter)
+ expect(new_converter_wrapper.send(:codec)).to eq(old_converter_wrapper.send(:codec))
+ end
+ end
+
+ describe '#payload_codec=' do
+ let(:codec) { Temporal::Connection::Converter::Codec::Base.new }
+
+ it 'resets the wrapper when converter has changed' do
+ old_converter_wrapper = subject.converter
+
+ expect(old_converter_wrapper).to be_a(Temporal::ConverterWrapper)
+ expect(old_converter_wrapper.send(:codec)).to eq(described_class::DEFAULT_PAYLOAD_CODEC)
+
+ subject.payload_codec = codec
+ new_converter_wrapper = subject.converter
+
+ expect(new_converter_wrapper).to be_a(Temporal::ConverterWrapper)
+ expect(new_converter_wrapper.send(:codec)).to eq(codec)
+ expect(new_converter_wrapper.send(:converter)).to eq(old_converter_wrapper.send(:converter))
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/connection/converter/codec/base_spec.rb b/spec/unit/lib/temporal/connection/converter/codec/base_spec.rb
new file mode 100644
index 00000000..22d0eecf
--- /dev/null
+++ b/spec/unit/lib/temporal/connection/converter/codec/base_spec.rb
@@ -0,0 +1,71 @@
+require 'temporal/connection/converter/codec/base'
+
+describe Temporal::Connection::Converter::Codec::Base do
+ let(:payloads) do
+ Temporalio::Api::Common::V1::Payloads.new(
+ payloads: [
+ Temporalio::Api::Common::V1::Payload.new(
+ metadata: { 'encoding' => 'json/plain' },
+ data: '{}'.b
+ )
+ ]
+ )
+ end
+
+ let(:encoded_payload) do
+ Temporalio::Api::Common::V1::Payload.new(
+ metadata: { 'encoding' => 'binary/encrypted' },
+ data: 'encrypted-payload'.b
+ )
+ end
+
+ let(:base_codec) { described_class.new }
+
+ describe '#encodes' do
+ it 'returns nil if payloads is nil' do
+ expect(base_codec.encodes(nil)).to be_nil
+ end
+
+ it 'encodes each payload in payloads' do
+ expect(base_codec).to receive(:encode).with(payloads.payloads[0]).and_return(encoded_payload)
+ base_codec.encodes(payloads)
+ end
+
+ it 'returns a new Payloads object with the encoded payloads' do
+ encoded_payloads = Temporalio::Api::Common::V1::Payloads.new(
+ payloads: [Temporalio::Api::Common::V1::Payload.new(
+ metadata: { 'encoding' => 'json/plain' },
+ data: 'encoded_payload'.b
+ )]
+ )
+
+ allow(base_codec).to receive(:encode).and_return(encoded_payloads.payloads[0])
+
+ expect(base_codec.encodes(payloads)).to eq(encoded_payloads)
+ end
+ end
+
+ describe '#decodes' do
+ it 'returns nil if payloads is nil' do
+ expect(base_codec.decodes(nil)).to be_nil
+ end
+
+ it 'decodes each payload in payloads' do
+ expect(base_codec).to receive(:decode).with(payloads.payloads[0]).and_return(payloads.payloads[0])
+ base_codec.decodes(payloads)
+ end
+
+ it 'returns a new Payloads object with the decoded payloads' do
+ decoded_payloads = Temporalio::Api::Common::V1::Payloads.new(
+ payloads: [Temporalio::Api::Common::V1::Payload.new(
+ metadata: { 'encoding' => 'json/plain' },
+ data: 'decoded_payload'.b
+ )]
+ )
+
+ allow(base_codec).to receive(:decode).and_return(decoded_payloads.payloads[0])
+
+ expect(base_codec.decodes(payloads)).to eq(decoded_payloads)
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/connection/converter/codec/chain_spec.rb b/spec/unit/lib/temporal/connection/converter/codec/chain_spec.rb
new file mode 100644
index 00000000..77c189db
--- /dev/null
+++ b/spec/unit/lib/temporal/connection/converter/codec/chain_spec.rb
@@ -0,0 +1,60 @@
+require 'temporal/connection/converter/codec/chain'
+
+describe Temporal::Connection::Converter::Codec::Chain do
+ let(:codec1) { double('PayloadCodec1') }
+ let(:codec2) { double('PayloadCodec2') }
+ let(:codec3) { double('PayloadCodec3') }
+
+ let(:payload_1) do
+ Temporalio::Api::Common::V1::Payload.new(
+ metadata: { 'encoding' => 'binary/plain' },
+ data: 'payload_1'.b
+ )
+ end
+ let(:payload_2) do
+ Temporalio::Api::Common::V1::Payload.new(
+ metadata: { 'encoding' => 'binary/plain' },
+ data: 'payload_2'.b
+ )
+ end
+ let(:payload_3) do
+ Temporalio::Api::Common::V1::Payload.new(
+ metadata: { 'encoding' => 'binary/plain' },
+ data: 'payload_3'.b
+ )
+ end
+ let(:payload_4) do
+ Temporalio::Api::Common::V1::Payload.new(
+ metadata: { 'encoding' => 'binary/plain' },
+ data: 'payload_4'.b
+ )
+ end
+
+ subject { described_class.new(payload_codecs: [codec1, codec2, codec3]) }
+
+ describe '#encode' do
+ it 'applies payload codecs in reverse order' do
+ expect(codec3).to receive(:encode).with(payload_1).and_return(payload_2)
+ expect(codec2).to receive(:encode).with(payload_2).and_return(payload_3)
+ expect(codec1).to receive(:encode).with(payload_3).and_return(payload_4)
+
+ result = subject.encode(payload_1)
+
+ expect(result.metadata).to eq(payload_4.metadata)
+ expect(result.data).to eq(payload_4.data)
+ end
+ end
+
+ describe '#decode' do
+ it 'applies payload codecs in the original order' do
+ expect(codec1).to receive(:decode).with(payload_1).and_return(payload_2)
+ expect(codec2).to receive(:decode).with(payload_2).and_return(payload_3)
+ expect(codec3).to receive(:decode).with(payload_3).and_return(payload_4)
+
+ result = subject.decode(payload_1)
+
+ expect(result.metadata).to eq(payload_4.metadata)
+ expect(result.data).to eq(payload_4.data)
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/connection/converter/composite_spec.rb b/spec/unit/lib/temporal/connection/converter/composite_spec.rb
index 9f74393b..b78a62c0 100644
--- a/spec/unit/lib/temporal/connection/converter/composite_spec.rb
+++ b/spec/unit/lib/temporal/connection/converter/composite_spec.rb
@@ -10,11 +10,11 @@
describe 'encoding' do
it 'tries converters until it finds a match' do
payloads = [
- Temporal::Api::Common::V1::Payload.new(
+ Temporalio::Api::Common::V1::Payload.new(
metadata: { 'encoding' => Temporal::Connection::Converter::Payload::Bytes::ENCODING },
data: 'test'.b
),
- Temporal::Api::Common::V1::Payload.new(
+ Temporalio::Api::Common::V1::Payload.new(
metadata: { 'encoding' => Temporal::Connection::Converter::Payload::JSON::ENCODING },
data: '"test"'
),
@@ -32,11 +32,11 @@
describe 'decoding' do
it 'uses metadata to pick a converter' do
payloads = [
- Temporal::Api::Common::V1::Payload.new(
+ Temporalio::Api::Common::V1::Payload.new(
metadata: { 'encoding' => Temporal::Connection::Converter::Payload::Bytes::ENCODING },
data: 'test'.b
),
- Temporal::Api::Common::V1::Payload.new(
+ Temporalio::Api::Common::V1::Payload.new(
metadata: { 'encoding' => Temporal::Connection::Converter::Payload::JSON::ENCODING },
data: '"test"'
),
@@ -50,11 +50,15 @@
end
it 'raises if there is no converter for an encoding' do
- payload = Temporal::Api::Common::V1::Payload.new(
+ payload = Temporalio::Api::Common::V1::Payload.new(
metadata: { 'encoding' => 'fake' }
)
- expect { subject.from_payload(payload) }.to raise_error(Temporal::Connection::Converter::Composite::ConverterNotFound)
+ expect do
+ subject.from_payload(payload)
+ end.to raise_error(Temporal::Connection::Converter::Composite::ConverterNotFound) do |e|
+ expect(e.message).to eq('Could not find PayloadConverter for fake')
+ end
end
end
end
diff --git a/spec/unit/lib/temporal/connection/converter/payload/bytes_spec.rb b/spec/unit/lib/temporal/connection/converter/payload/bytes_spec.rb
index 8a9391fb..e8fe42ff 100644
--- a/spec/unit/lib/temporal/connection/converter/payload/bytes_spec.rb
+++ b/spec/unit/lib/temporal/connection/converter/payload/bytes_spec.rb
@@ -5,7 +5,7 @@
describe 'round trip' do
it 'encodes to a binary/plain payload' do
- payload = Temporal::Api::Common::V1::Payload.new(
+ payload = Temporalio::Api::Common::V1::Payload.new(
metadata: { 'encoding' => described_class::ENCODING },
data: 'test'.b
)
@@ -14,7 +14,7 @@
end
it 'decodes a binary/plain payload to a byte string' do
- payload = Temporal::Api::Common::V1::Payload.new(
+ payload = Temporalio::Api::Common::V1::Payload.new(
metadata: { 'encoding' => described_class::ENCODING },
data: 'test'.b
)
diff --git a/spec/unit/lib/temporal/connection/converter/payload/nil_spec.rb b/spec/unit/lib/temporal/connection/converter/payload/nil_spec.rb
index 3779d27b..78e8d7f7 100644
--- a/spec/unit/lib/temporal/connection/converter/payload/nil_spec.rb
+++ b/spec/unit/lib/temporal/connection/converter/payload/nil_spec.rb
@@ -4,7 +4,7 @@
subject { described_class.new }
it 'encodes a null payload' do
- payload = Temporal::Api::Common::V1::Payload.new(
+ payload = Temporalio::Api::Common::V1::Payload.new(
metadata: { 'encoding' => described_class::ENCODING }
)
@@ -12,7 +12,7 @@
end
it 'decodes a null payload' do
- payload = Temporal::Api::Common::V1::Payload.new(
+ payload = Temporalio::Api::Common::V1::Payload.new(
metadata: { 'encoding' => described_class::ENCODING }
)
diff --git a/spec/unit/lib/temporal/connection/converter/payload/proto_json_spec.rb b/spec/unit/lib/temporal/connection/converter/payload/proto_json_spec.rb
new file mode 100644
index 00000000..589c921f
--- /dev/null
+++ b/spec/unit/lib/temporal/connection/converter/payload/proto_json_spec.rb
@@ -0,0 +1,32 @@
+require 'temporal/connection/converter/payload/proto_json'
+
+describe Temporal::Connection::Converter::Payload::ProtoJSON do
+ subject { described_class.new }
+
+ describe 'round trip' do
+ it 'converts' do
+ # Temporalio::Api::Common::V1::Payload is a protobuf.
+ # Using it as the "input" here to show the roundtrip.
+ # #to_payload will return a wrapped Payload around this one.
+ input = Temporalio::Api::Common::V1::Payload.new(
+ metadata: { 'hello' => 'world' },
+ data: 'hello world',
+ )
+
+ expect(subject.from_payload(subject.to_payload(input))).to eq(input)
+ end
+
+ it 'encodes special characters' do
+ input = Temporalio::Api::Common::V1::Payload.new(
+ metadata: { 'it’ll work!' => 'bytebytebyte' },
+ )
+ expect(subject.from_payload(subject.to_payload(input))).to eq(input)
+ end
+ end
+
+ it 'skips if not proto message' do
+ input = { hello: 'world' }
+
+ expect(subject.to_payload(input)).to be nil
+ end
+end
diff --git a/spec/unit/lib/temporal/connection/retryer.rb b/spec/unit/lib/temporal/connection/retryer_spec.rb
similarity index 100%
rename from spec/unit/lib/temporal/connection/retryer.rb
rename to spec/unit/lib/temporal/connection/retryer_spec.rb
diff --git a/spec/unit/lib/temporal/connection/serializer/backfill_spec.rb b/spec/unit/lib/temporal/connection/serializer/backfill_spec.rb
new file mode 100644
index 00000000..b4505a57
--- /dev/null
+++ b/spec/unit/lib/temporal/connection/serializer/backfill_spec.rb
@@ -0,0 +1,38 @@
+require "temporal/connection/errors"
+require "temporal/schedule/backfill"
+require "temporal/connection/serializer/backfill"
+
+describe Temporal::Connection::Serializer::Backfill do
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+ let(:example_backfill) do
+ Temporal::Schedule::Backfill.new(
+ start_time: Time.new(2000, 1, 1, 0, 0, 0),
+ end_time: Time.new(2031, 1, 1, 0, 0, 0),
+ overlap_policy: :buffer_all
+ )
+ end
+
+ describe "to_proto" do
+ it "raises an error if an invalid overlap_policy is specified" do
+ invalid = Temporal::Schedule::Backfill.new(overlap_policy: :foobar)
+ expect do
+ described_class.new(invalid, converter).to_proto
+ end
+ .to(raise_error(Temporal::Connection::ArgumentError, "Unknown schedule overlap policy specified: foobar"))
+ end
+
+ it "produces well-formed protobuf" do
+ result = described_class.new(example_backfill, converter).to_proto
+
+ expect(result).to(be_a(Temporalio::Api::Schedule::V1::BackfillRequest))
+ expect(result.overlap_policy).to(eq(:SCHEDULE_OVERLAP_POLICY_BUFFER_ALL))
+ expect(result.start_time.to_time).to(eq(example_backfill.start_time))
+ expect(result.end_time.to_time).to(eq(example_backfill.end_time))
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/connection/serializer/continue_as_new_spec.rb b/spec/unit/lib/temporal/connection/serializer/continue_as_new_spec.rb
index fbb00623..398231da 100644
--- a/spec/unit/lib/temporal/connection/serializer/continue_as_new_spec.rb
+++ b/spec/unit/lib/temporal/connection/serializer/continue_as_new_spec.rb
@@ -2,21 +2,52 @@
require 'temporal/workflow/command'
describe Temporal::Connection::Serializer::ContinueAsNew do
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+
describe 'to_proto' do
it 'produces a protobuf' do
+ timeouts = {
+ execution: 1000,
+ run: 100,
+ task: 10
+ }
command = Temporal::Workflow::Command::ContinueAsNew.new(
- workflow_type: 'Test',
- task_queue: 'Test',
+ workflow_type: 'my-workflow-type',
+ task_queue: 'my-task-queue',
input: ['one', 'two'],
- timeouts: Temporal.configuration.timeouts
+ timeouts: timeouts,
+ headers: {'foo-header': 'bar'},
+ memo: {'foo-memo': 'baz'},
+ search_attributes: {'foo-search-attribute': 'qux'},
)
- result = described_class.new(command).to_proto
+ result = described_class.new(command, converter).to_proto
- expect(result).to be_an_instance_of(Temporal::Api::Command::V1::Command)
+ expect(result).to be_an_instance_of(Temporalio::Api::Command::V1::Command)
expect(result.command_type).to eql(
:COMMAND_TYPE_CONTINUE_AS_NEW_WORKFLOW_EXECUTION
)
+ expect(result.continue_as_new_workflow_execution_command_attributes).not_to be_nil
+ attribs = result.continue_as_new_workflow_execution_command_attributes
+
+ expect(attribs.workflow_type.name).to eq('my-workflow-type')
+
+ expect(attribs.task_queue.name).to eq('my-task-queue')
+
+ expect(attribs.input.payloads[0].data).to eq('"one"')
+ expect(attribs.input.payloads[1].data).to eq('"two"')
+
+ expect(attribs.header.fields['foo-header'].data).to eq('"bar"')
+ expect(attribs.memo.fields['foo-memo'].data).to eq('"baz"')
+ expect(attribs.search_attributes.indexed_fields['foo-search-attribute'].data).to eq('"qux"')
+
+ expect(attribs.workflow_run_timeout.seconds).to eq(timeouts[:run])
+ expect(attribs.workflow_task_timeout.seconds).to eq(timeouts[:task])
end
end
end
diff --git a/spec/unit/lib/temporal/connection/serializer/failure_spec.rb b/spec/unit/lib/temporal/connection/serializer/failure_spec.rb
index cff68c52..2bde0337 100644
--- a/spec/unit/lib/temporal/connection/serializer/failure_spec.rb
+++ b/spec/unit/lib/temporal/connection/serializer/failure_spec.rb
@@ -2,11 +2,109 @@
require 'temporal/workflow/command'
describe Temporal::Connection::Serializer::Failure do
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+
describe 'to_proto' do
it 'produces a protobuf' do
- result = described_class.new(StandardError.new('test')).to_proto
+ result = described_class.new(StandardError.new('test'), converter).to_proto
+
+ expect(result).to be_an_instance_of(Temporalio::Api::Failure::V1::Failure)
+ end
+
+ class NaughtyClass; end
+
+ class MyError < StandardError
+ attr_reader :foo, :bad_class
+
+ def initialize(foo, bar, bad_class:)
+ @foo = foo
+ @bad_class = bad_class
- expect(result).to be_an_instance_of(Temporal::Api::Failure::V1::Failure)
+ # Ensure that we serialize derived properties.
+ my_message = "Hello, #{bar}!"
+ super(my_message)
+ end
end
+
+ it 'Serializes round-trippable full errors when asked to' do
+ # Make sure serializing various bits round-trips
+ e = MyError.new(['seven', 'three'], "Bar", bad_class: NaughtyClass)
+ failure_proto = described_class.new(e, converter, serialize_whole_error: true).to_proto
+ expect(failure_proto.application_failure_info.type).to eq("MyError")
+
+ deserialized_error = converter.from_details_payloads(failure_proto.application_failure_info.details)
+ expect(deserialized_error).to be_an_instance_of(MyError)
+ expect(deserialized_error.message).to eq("Hello, Bar!")
+ expect(deserialized_error.foo).to eq(['seven', 'three'])
+ expect(deserialized_error.bad_class).to eq(NaughtyClass)
+ end
+
+ class MyBigError < StandardError
+ attr_reader :big_payload
+ def initialize(message)
+ super(message)
+ @big_payload = '123456789012345678901234567890123456789012345678901234567890'
+ end
+ end
+
+
+ it 'deals with too-large serialization using the old path' do
+ e = MyBigError.new('Uh oh!')
+ # Normal serialization path
+ failure_proto = described_class.new(e, converter, serialize_whole_error: true, max_bytes: 1000).to_proto
+ expect(failure_proto.application_failure_info.type).to eq('MyBigError')
+ deserialized_error = converter.from_details_payloads(failure_proto.application_failure_info.details)
+ expect(deserialized_error).to be_an_instance_of(MyBigError)
+ expect(deserialized_error.big_payload).to eq('123456789012345678901234567890123456789012345678901234567890')
+
+ # Exercise legacy serialization mechanism
+ failure_proto = described_class.new(e, converter, serialize_whole_error: false).to_proto
+ expect(failure_proto.application_failure_info.type).to eq('MyBigError')
+ old_style_deserialized_error = MyBigError.new(converter.from_details_payloads(failure_proto.application_failure_info.details))
+ expect(old_style_deserialized_error).to be_an_instance_of(MyBigError)
+ expect(old_style_deserialized_error.message).to eq('Uh oh!')
+
+ # If the payload size exceeds the max_bytes, we fallback to the old-style serialization.
+ failure_proto = described_class.new(e, converter, serialize_whole_error: true, max_bytes: 50).to_proto
+ expect(failure_proto.application_failure_info.type).to eq('MyBigError')
+ avoids_truncation_error = MyBigError.new(converter.from_details_payloads(failure_proto.application_failure_info.details))
+ expect(avoids_truncation_error).to be_an_instance_of(MyBigError)
+ expect(avoids_truncation_error.message).to eq('Uh oh!')
+
+ # Fallback serialization should exactly match legacy serialization
+ expect(avoids_truncation_error).to eq(old_style_deserialized_error)
+ end
+
+ it 'logs a helpful error when the payload is too large' do
+ e = MyBigError.new('Uh oh!')
+
+ allow(Temporal.logger).to receive(:error)
+ max_bytes = 50
+ described_class.new(e, converter, serialize_whole_error: true, max_bytes: max_bytes).to_proto
+ expect(Temporal.logger)
+ .to have_received(:error)
+ .with(
+ "Could not serialize exception because it's too large, so we are using a fallback that may not deserialize "\
+ "correctly on the client. First #{max_bytes} bytes:\n{\"^o\":\"MyBigError\",\"big_payload\":\"1234567890123456",
+ { unserializable_error: 'MyBigError' }
+ )
+
+ end
+
+ class MyArglessError < RuntimeError
+ def initialize; end
+ end
+
+ it 'successfully processes an error with no constructor arguments' do
+ e = MyArglessError.new
+ failure_proto = described_class.new(e, converter, serialize_whole_error: true).to_proto
+ expect(failure_proto.application_failure_info.type).to eq('MyArglessError')
+ end
+
end
end
diff --git a/spec/unit/lib/temporal/connection/serializer/query_answer_spec.rb b/spec/unit/lib/temporal/connection/serializer/query_answer_spec.rb
new file mode 100644
index 00000000..5e912206
--- /dev/null
+++ b/spec/unit/lib/temporal/connection/serializer/query_answer_spec.rb
@@ -0,0 +1,25 @@
+require 'temporal/connection/serializer/query_answer'
+require 'temporal/workflow/query_result'
+
+describe Temporal::Connection::Serializer::QueryAnswer do
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+
+ describe 'to_proto' do
+ let(:query_result) { Temporal::Workflow::QueryResult.answer(42) }
+
+ it 'produces a protobuf' do
+ result = described_class.new(query_result, converter).to_proto
+
+ expect(result).to be_a(Temporalio::Api::Query::V1::WorkflowQueryResult)
+ expect(result.result_type).to eq(Temporalio::Api::Enums::V1::QueryResultType.lookup(
+ Temporalio::Api::Enums::V1::QueryResultType::QUERY_RESULT_TYPE_ANSWERED)
+ )
+ expect(result.answer).to eq(converter.to_query_payloads(42))
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/connection/serializer/query_failure_spec.rb b/spec/unit/lib/temporal/connection/serializer/query_failure_spec.rb
new file mode 100644
index 00000000..62926aea
--- /dev/null
+++ b/spec/unit/lib/temporal/connection/serializer/query_failure_spec.rb
@@ -0,0 +1,26 @@
+require 'temporal/connection/serializer/query_failure'
+require 'temporal/workflow/query_result'
+
+describe Temporal::Connection::Serializer::QueryFailure do
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+
+ describe 'to_proto' do
+ let(:exception) { StandardError.new('Test query failure') }
+ let(:query_result) { Temporal::Workflow::QueryResult.failure(exception) }
+
+ it 'produces a protobuf' do
+ result = described_class.new(query_result, converter).to_proto
+
+ expect(result).to be_a(Temporalio::Api::Query::V1::WorkflowQueryResult)
+ expect(result.result_type).to eq(Temporalio::Api::Enums::V1::QueryResultType.lookup(
+ Temporalio::Api::Enums::V1::QueryResultType::QUERY_RESULT_TYPE_FAILED)
+ )
+ expect(result.error_message).to eq('Test query failure')
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/connection/serializer/retry_policy_spec.rb b/spec/unit/lib/temporal/connection/serializer/retry_policy_spec.rb
index 211f807f..5e27503f 100644
--- a/spec/unit/lib/temporal/connection/serializer/retry_policy_spec.rb
+++ b/spec/unit/lib/temporal/connection/serializer/retry_policy_spec.rb
@@ -2,6 +2,13 @@
require 'temporal/connection/serializer/retry_policy'
describe Temporal::Connection::Serializer::RetryPolicy do
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+
describe 'to_proto' do
let(:example_policy) do
Temporal::RetryPolicy.new(
@@ -14,7 +21,7 @@
end
it 'converts to proto' do
- proto = described_class.new(example_policy).to_proto
+ proto = described_class.new(example_policy, converter).to_proto
expect(proto.initial_interval.seconds).to eq(1)
expect(proto.backoff_coefficient).to eq(1.5)
expect(proto.maximum_interval.seconds).to eq(5)
diff --git a/spec/unit/lib/temporal/connection/serializer/schedule_action_spec.rb b/spec/unit/lib/temporal/connection/serializer/schedule_action_spec.rb
new file mode 100644
index 00000000..93f9e87c
--- /dev/null
+++ b/spec/unit/lib/temporal/connection/serializer/schedule_action_spec.rb
@@ -0,0 +1,56 @@
+require "temporal/connection/errors"
+require "temporal/schedule/start_workflow_action"
+require "temporal/connection/serializer/schedule_action"
+
+describe Temporal::Connection::Serializer::ScheduleAction do
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+ let(:timeouts) { {run: 100, task: 10} }
+
+ let(:example_action) do
+ Temporal::Schedule::StartWorkflowAction.new(
+ "HelloWorldWorkflow",
+ "one",
+ "two",
+ options: {
+ workflow_id: "foobar",
+ task_queue: "my-task-queue",
+ timeouts: timeouts,
+ memo: {:"foo-memo" => "baz"},
+ search_attributes: {:"foo-search-attribute" => "qux"},
+ headers: {:"foo-header" => "bar"}
+ }
+ )
+ end
+
+ describe "to_proto" do
+ it "raises an error if an invalid action is specified" do
+ expect do
+ described_class.new(123, converter).to_proto
+ end
+ .to(raise_error(Temporal::Connection::ArgumentError)) do |e|
+ expect(e.message).to(eq("Unknown action type Integer"))
+ end
+ end
+
+ it "produces well-formed protobuf" do
+ result = described_class.new(example_action, converter).to_proto
+
+ expect(result).to(be_a(Temporalio::Api::Schedule::V1::ScheduleAction))
+
+ action = result.start_workflow
+ expect(action).to(be_a(Temporalio::Api::Workflow::V1::NewWorkflowExecutionInfo))
+ expect(action.task_queue.name).to(eq("my-task-queue"))
+ expect(action.input.payloads.map(&:data)).to(eq(["\"one\"", "\"two\""]))
+ expect(action.header.fields["foo-header"].data).to(eq("\"bar\""))
+ expect(action.memo.fields["foo-memo"].data).to(eq("\"baz\""))
+ expect(action.search_attributes.indexed_fields["foo-search-attribute"].data).to(eq("\"qux\""))
+ expect(action.workflow_run_timeout.seconds).to(eq(timeouts[:run]))
+ expect(action.workflow_task_timeout.seconds).to(eq(timeouts[:task]))
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/connection/serializer/schedule_policies_spec.rb b/spec/unit/lib/temporal/connection/serializer/schedule_policies_spec.rb
new file mode 100644
index 00000000..2b51cee3
--- /dev/null
+++ b/spec/unit/lib/temporal/connection/serializer/schedule_policies_spec.rb
@@ -0,0 +1,37 @@
+require "temporal/schedule/schedule_policies"
+require "temporal/connection/serializer/schedule_policies"
+
+describe Temporal::Connection::Serializer::SchedulePolicies do
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+ let(:example_policies) do
+ Temporal::Schedule::SchedulePolicies.new(
+ overlap_policy: :buffer_one,
+ catchup_window: 600,
+ pause_on_failure: true
+ )
+ end
+
+ describe "to_proto" do
+ it "produces well-formed protobuf" do
+ result = described_class.new(example_policies, converter).to_proto
+
+ expect(result).to(be_a(Temporalio::Api::Schedule::V1::SchedulePolicies))
+ expect(result.overlap_policy).to(eq(:SCHEDULE_OVERLAP_POLICY_BUFFER_ONE))
+ expect(result.catchup_window.seconds).to(eq(600))
+ expect(result.pause_on_failure).to(eq(true))
+ end
+
+ it "should raise if an unknown overlap policy is specified" do
+ invalid_policies = Temporal::Schedule::SchedulePolicies.new(overlap_policy: :foobar)
+ expect do
+ described_class.new(invalid_policies, converter).to_proto
+ end
+ .to(raise_error(Temporal::Connection::ArgumentError, "Unknown schedule overlap policy specified: foobar"))
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/connection/serializer/schedule_spec_spec.rb b/spec/unit/lib/temporal/connection/serializer/schedule_spec_spec.rb
new file mode 100644
index 00000000..ee0cd0f8
--- /dev/null
+++ b/spec/unit/lib/temporal/connection/serializer/schedule_spec_spec.rb
@@ -0,0 +1,63 @@
+require "temporal/schedule/schedule_spec"
+require "temporal/schedule/interval"
+require "temporal/schedule/calendar"
+require "temporal/connection/serializer/schedule_spec"
+
+describe Temporal::Connection::Serializer::ScheduleSpec do
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+ let(:example_spec) do
+ Temporal::Schedule::ScheduleSpec.new(
+ cron_expressions: ["@hourly"],
+ intervals: [
+ Temporal::Schedule::Interval.new(every: 50, offset: 30),
+ Temporal::Schedule::Interval.new(every: 60)
+ ],
+ calendars: [
+ Temporal::Schedule::Calendar.new(
+ hour: "7",
+ minute: "0,3,15",
+ day_of_week: "MONDAY",
+ month: "1-6",
+ comment: "some comment explaining intent"
+ ),
+ Temporal::Schedule::Calendar.new(
+ minute: "8",
+ hour: "*"
+ )
+ ],
+ start_time: Time.new(2000, 1, 1, 0, 0, 0),
+ end_time: Time.new(2031, 1, 1, 0, 0, 0),
+ jitter: 500,
+ timezone_name: "America/New_York"
+ )
+ end
+
+ describe "to_proto" do
+ it "produces well-formed protobuf" do
+ result = described_class.new(example_spec, converter).to_proto
+
+ expect(result).to(be_a(Temporalio::Api::Schedule::V1::ScheduleSpec))
+ expect(result.cron_string).to(eq(["@hourly"]))
+ expect(result.interval[0].interval.seconds).to(eq(50))
+ expect(result.interval[0].phase.seconds).to(eq(30))
+ expect(result.interval[1].interval.seconds).to(eq(60))
+ expect(result.interval[1].phase).to(be_nil)
+ expect(result.calendar[0].hour).to(eq("7"))
+ expect(result.calendar[0].minute).to(eq("0,3,15"))
+ expect(result.calendar[0].day_of_week).to(eq("MONDAY"))
+ expect(result.calendar[0].month).to(eq("1-6"))
+ expect(result.calendar[0].comment).to(eq("some comment explaining intent"))
+ expect(result.calendar[1].hour).to(eq("*"))
+ expect(result.calendar[1].minute).to(eq("8"))
+ expect(result.start_time.to_time).to(eq(example_spec.start_time))
+ expect(result.end_time.to_time).to(eq(example_spec.end_time))
+ expect(result.jitter.seconds).to(eq(500))
+ expect(result.timezone_name).to(eq("America/New_York"))
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/connection/serializer/schedule_state_spec.rb b/spec/unit/lib/temporal/connection/serializer/schedule_state_spec.rb
new file mode 100644
index 00000000..3fbe8051
--- /dev/null
+++ b/spec/unit/lib/temporal/connection/serializer/schedule_state_spec.rb
@@ -0,0 +1,31 @@
+require "temporal/schedule/schedule_state"
+require "temporal/connection/serializer/schedule_state"
+
+describe Temporal::Connection::Serializer::ScheduleState do
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+ let(:example_state) do
+ Temporal::Schedule::ScheduleState.new(
+ notes: "some notes",
+ paused: true,
+ limited_actions: true,
+ remaining_actions: 500
+ )
+ end
+
+ describe "to_proto" do
+ it "produces well-formed protobuf" do
+ result = described_class.new(example_state, converter).to_proto
+
+ expect(result).to(be_a(Temporalio::Api::Schedule::V1::ScheduleState))
+ expect(result.notes).to(eq("some notes"))
+ expect(result.paused).to(eq(true))
+ expect(result.limited_actions).to(eq(true))
+ expect(result.remaining_actions).to(eq(500))
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/connection/serializer/start_child_workflow_spec.rb b/spec/unit/lib/temporal/connection/serializer/start_child_workflow_spec.rb
new file mode 100644
index 00000000..ae26f88f
--- /dev/null
+++ b/spec/unit/lib/temporal/connection/serializer/start_child_workflow_spec.rb
@@ -0,0 +1,55 @@
+require 'temporal/connection/errors'
+require 'temporal/workflow/command'
+require 'temporal/connection/serializer/start_child_workflow'
+
+describe Temporal::Connection::Serializer::StartChildWorkflow do
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+ let(:example_command) do
+ Temporal::Workflow::Command::StartChildWorkflow.new(
+ workflow_id: SecureRandom.uuid,
+ workflow_type: '',
+ input: nil,
+ namespace: '',
+ task_queue: '',
+ retry_policy: nil,
+ timeouts: { execution: 1, run: 1, task: 1 },
+ headers: nil,
+ memo: {},
+ search_attributes: {},
+ )
+ end
+
+ describe 'to_proto' do
+ it 'raises an error if an invalid parent_close_policy is specified' do
+ command = example_command
+ command.parent_close_policy = :invalid
+
+ expect do
+ described_class.new(command, converter).to_proto
+ end.to raise_error(Temporal::Connection::ArgumentError) do |e|
+ expect(e.message).to eq("Unknown parent_close_policy '#{command.parent_close_policy}' specified")
+ end
+ end
+
+ {
+ nil => :PARENT_CLOSE_POLICY_UNSPECIFIED,
+ :terminate => :PARENT_CLOSE_POLICY_TERMINATE,
+ :abandon => :PARENT_CLOSE_POLICY_ABANDON,
+ :request_cancel => :PARENT_CLOSE_POLICY_REQUEST_CANCEL,
+ }.each do |policy_name, expected_parent_close_policy|
+ it "successfully resolves a parent_close_policy of #{policy_name}" do
+ command = example_command
+ command.parent_close_policy = policy_name
+
+ result = described_class.new(command, converter).to_proto
+ attribs = result.start_child_workflow_execution_command_attributes
+ expect(attribs.parent_close_policy).to eq(expected_parent_close_policy)
+ end
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/connection/serializer/upsert_search_attributes_spec.rb b/spec/unit/lib/temporal/connection/serializer/upsert_search_attributes_spec.rb
new file mode 100644
index 00000000..5bdace1a
--- /dev/null
+++ b/spec/unit/lib/temporal/connection/serializer/upsert_search_attributes_spec.rb
@@ -0,0 +1,39 @@
+require 'securerandom'
+require 'time'
+require 'temporal/connection/serializer/upsert_search_attributes'
+require 'temporal/workflow/command'
+
+describe Temporal::Connection::Serializer::UpsertSearchAttributes do
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+
+ it 'produces a protobuf that round-trips' do
+ expected_attributes = {
+ 'CustomStringField' => 'moo',
+ 'CustomBoolField' => true,
+ 'CustomDoubleField' => 3.14,
+ 'CustomIntField' => 0,
+ 'CustomKeywordField' => SecureRandom.uuid,
+ 'CustomDatetimeField' => Time.now.to_i
+ }
+
+ command = Temporal::Workflow::Command::UpsertSearchAttributes.new(
+ search_attributes: expected_attributes
+ )
+
+ result = described_class.new(command, converter).to_proto
+ expect(result).to be_an_instance_of(Temporalio::Api::Command::V1::Command)
+ expect(result.command_type).to eql(
+ :COMMAND_TYPE_UPSERT_WORKFLOW_SEARCH_ATTRIBUTES
+ )
+ command_attributes = result.upsert_workflow_search_attributes_command_attributes
+ expect(command_attributes).not_to be_nil
+ actual_attributes = converter.from_payload_map_without_codec(command_attributes&.search_attributes&.indexed_fields)
+ expect(actual_attributes).to eql(expected_attributes)
+
+ end
+end
diff --git a/spec/unit/lib/temporal/connection/serializer/workflow_id_reuse_policy_spec.rb b/spec/unit/lib/temporal/connection/serializer/workflow_id_reuse_policy_spec.rb
new file mode 100644
index 00000000..b1ee6cad
--- /dev/null
+++ b/spec/unit/lib/temporal/connection/serializer/workflow_id_reuse_policy_spec.rb
@@ -0,0 +1,39 @@
+require 'temporal/retry_policy'
+require 'temporal/connection/serializer/workflow_id_reuse_policy'
+
+describe Temporal::Connection::Serializer::WorkflowIdReusePolicy do
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+
+ describe 'to_proto' do
+ SYM_TO_PROTO = {
+ allow_failed: Temporalio::Api::Enums::V1::WorkflowIdReusePolicy::WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE_FAILED_ONLY,
+ allow: Temporalio::Api::Enums::V1::WorkflowIdReusePolicy::WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE,
+ reject: Temporalio::Api::Enums::V1::WorkflowIdReusePolicy::WORKFLOW_ID_REUSE_POLICY_REJECT_DUPLICATE,
+ terminate_if_running: Temporalio::Api::Enums::V1::WorkflowIdReusePolicy::WORKFLOW_ID_REUSE_POLICY_TERMINATE_IF_RUNNING
+ }.freeze
+
+ def self.test_valid_policy(policy_sym)
+ it "serializes #{policy_sym}" do
+ proto_enum = described_class.new(policy_sym, converter).to_proto
+ expected = SYM_TO_PROTO[policy_sym]
+ expect(proto_enum).to eq(expected)
+ end
+ end
+
+ test_valid_policy(:allow)
+ test_valid_policy(:allow_failed)
+ test_valid_policy(:reject)
+ test_valid_policy(:terminate_if_running)
+
+ it "rejects invalid policies" do
+ expect do
+ described_class.new(:not_a_valid_policy, converter).to_proto
+ end.to raise_error(Temporal::Connection::ArgumentError, 'Unknown workflow_id_reuse_policy specified: not_a_valid_policy')
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/connection_spec.rb b/spec/unit/lib/temporal/connection_spec.rb
new file mode 100644
index 00000000..a3e5642f
--- /dev/null
+++ b/spec/unit/lib/temporal/connection_spec.rb
@@ -0,0 +1,70 @@
+describe Temporal::Connection do
+ subject { described_class.generate(config.for_connection) }
+
+ let(:connection_type) { :grpc }
+ let(:credentials) { nil }
+ let(:config) do
+ config = Temporal::Configuration.new
+ config.connection_type = connection_type
+ config.credentials = credentials if credentials
+ config
+ end
+
+ context 'identity' do
+ let(:identity) { 'my_identity' }
+ it 'overrides' do
+ config.identity = identity
+ expect(subject.send(:identity)).to eq(identity)
+ end
+ end
+
+ context 'insecure' do
+ let(:credentials) { :this_channel_is_insecure }
+
+ it 'generates a grpc connection' do
+ expect(subject).to be_kind_of(Temporal::Connection::GRPC)
+ expect(subject.send(:identity)).not_to be_nil
+ expect(subject.send(:credentials)).to eq(:this_channel_is_insecure)
+ expect(subject.send(:converter)).to eq(config.converter)
+ end
+ end
+
+ context 'ssl' do
+ let(:credentials) { GRPC::Core::ChannelCredentials.new }
+
+ it 'generates a grpc connection' do
+ expect(subject).to be_kind_of(Temporal::Connection::GRPC)
+ expect(subject.send(:identity)).not_to be_nil
+ expect(subject.send(:credentials)).to be_kind_of(GRPC::Core::ChannelCredentials)
+ expect(subject.send(:converter)).to eq(config.converter)
+ end
+ end
+
+ context 'oauth2' do
+ let(:credentials) { GRPC::Core::CallCredentials.new(proc { { authorization: 'token' } }) }
+
+ it 'generates a grpc connection' do
+ expect(subject).to be_kind_of(Temporal::Connection::GRPC)
+ expect(subject.send(:identity)).not_to be_nil
+ expect(subject.send(:credentials)).to be_kind_of(GRPC::Core::CallCredentials)
+ expect(subject.send(:converter)).to eq(config.converter)
+ end
+ end
+
+ context 'ssl + oauth2' do
+ let(:credentials) do
+ GRPC::Core::ChannelCredentials.new.compose(
+ GRPC::Core::CallCredentials.new(
+ proc { { authorization: 'token' } }
+ )
+ )
+ end
+
+ it 'generates a grpc connection' do
+ expect(subject).to be_kind_of(Temporal::Connection::GRPC)
+ expect(subject.send(:identity)).not_to be_nil
+ expect(subject.send(:credentials)).to be_kind_of(GRPC::Core::ChannelCredentials)
+ expect(subject.send(:converter)).to eq(config.converter)
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/converter_wrapper_spec.rb b/spec/unit/lib/temporal/converter_wrapper_spec.rb
new file mode 100644
index 00000000..f5b06af4
--- /dev/null
+++ b/spec/unit/lib/temporal/converter_wrapper_spec.rb
@@ -0,0 +1,175 @@
+require 'temporal/converter_wrapper'
+require 'temporal/connection/converter/payload/bytes'
+require 'temporal/connection/converter/payload/nil'
+require 'temporal/connection/converter/composite'
+
+describe Temporal::ConverterWrapper do
+ class TestCodec < Temporal::Connection::Converter::Codec::Base
+ def encode(payload)
+ return payload
+ end
+
+ def decode(payload)
+ return payload
+ end
+ end
+
+ subject { described_class.new(converter, codec) }
+ let(:converter) do
+ Temporal::Connection::Converter::Composite.new(payload_converters: [
+ Temporal::Connection::Converter::Payload::Bytes.new,
+ Temporal::Connection::Converter::Payload::Nil.new
+ ])
+ end
+ let(:codec) { Temporal::Connection::Converter::Codec::Chain.new(payload_codecs: [TestCodec.new]) }
+ let(:payloads) { Fabricate(:api_payloads, payloads_array: [payload_bytes, payload_nil]) }
+ let(:payload_bytes) { Fabricate(:api_payload_bytes, bytes: 'test-payload') }
+ let(:payload_nil) { Fabricate(:api_payload_nil) }
+
+ before do
+ allow(codec).to receive(:encode).and_call_original
+ allow(codec).to receive(:encodes).and_call_original
+ allow(codec).to receive(:decode).and_call_original
+ allow(codec).to receive(:decodes).and_call_original
+ end
+
+ describe '#from_payloads' do
+ it 'decodes and converts' do
+ expect(subject.from_payloads(payloads)).to eq(['test-payload', nil])
+ expect(codec).to have_received(:decodes)
+ end
+ end
+
+ describe '#from_payload' do
+ it 'decodes and converts' do
+ expect(subject.from_payload(payload_bytes)).to eq('test-payload')
+ expect(codec).to have_received(:decode)
+ end
+ end
+
+ describe '#from_payload_map_without_codec' do
+ let(:payload_map) do
+ Google::Protobuf::Map.new(:string, :message, Temporalio::Api::Common::V1::Payload).tap do |m|
+ m['first'] = payload_bytes
+ m['second'] = payload_nil
+ end
+ end
+
+ it 'converts' do
+ expect(subject.from_payload_map_without_codec(payload_map))
+ .to eq('first' => 'test-payload', 'second' => nil)
+ expect(codec).not_to have_received(:decode)
+ end
+ end
+
+ describe '#from_result_payloads' do
+ it 'decodes and converts' do
+ expect(subject.from_result_payloads(payloads)).to eq('test-payload')
+ expect(codec).to have_received(:decodes)
+ end
+ end
+
+ describe '#from_details_payloads' do
+ it 'decodes and converts first payload' do
+ expect(subject.from_details_payloads(payloads)).to eq('test-payload')
+ expect(codec).to have_received(:decodes)
+ end
+ end
+
+ describe '#from_signal_payloads' do
+ it 'decodes and converts first payload' do
+ expect(subject.from_signal_payloads(payloads)).to eq('test-payload')
+ expect(codec).to have_received(:decodes)
+ end
+ end
+
+ describe '#from_query_payloads' do
+ it 'decodes and converts first payload' do
+ expect(subject.from_query_payloads(payloads)).to eq('test-payload')
+ expect(codec).to have_received(:decodes)
+ end
+ end
+
+ describe '#from_payload_map' do
+ let(:payload_map) do
+ Google::Protobuf::Map.new(:string, :message, Temporalio::Api::Common::V1::Payload).tap do |m|
+ m['first'] = payload_bytes
+ m['second'] = payload_nil
+ end
+ end
+
+ it 'decodes and converts first payload' do
+ expect(subject.from_payload_map(payload_map))
+ .to eq('first' => 'test-payload', 'second' => nil)
+ expect(codec).to have_received(:decode).twice
+ end
+ end
+
+ describe '#to_payloads' do
+ it 'converts and encodes' do
+ expect(subject.to_payloads(['test-payload'.b, nil])).to eq(payloads)
+ expect(codec).to have_received(:encodes)
+ end
+ end
+
+ describe '#to_payload' do
+ it 'converts and encodes' do
+ expect(subject.to_payload('test-payload'.b)).to eq(payload_bytes)
+ expect(codec).to have_received(:encode)
+ end
+ end
+
+ describe '#to_payload_map_without_codec' do
+ let(:payload_map) { { first: payload_bytes, second: payload_nil } }
+
+ it 'converts' do
+ expect(subject.to_payload_map_without_codec(first: 'test-payload'.b, second: nil)).to eq(payload_map)
+ expect(codec).not_to have_received(:encode)
+ end
+ end
+
+ describe '#to_result_payloads' do
+ let(:payloads) { Fabricate(:api_payloads, payloads_array: [payload_bytes]) }
+
+ it 'converts and encodes' do
+ expect(subject.to_result_payloads('test-payload'.b)).to eq(payloads)
+ expect(codec).to have_received(:encodes)
+ end
+ end
+
+ describe '#to_details_payloads' do
+ let(:payloads) { Fabricate(:api_payloads, payloads_array: [payload_bytes]) }
+
+ it 'converts and encodes' do
+ expect(subject.to_details_payloads('test-payload'.b)).to eq(payloads)
+ expect(codec).to have_received(:encodes)
+ end
+ end
+
+ describe '#to_signal_payloads' do
+ let(:payloads) { Fabricate(:api_payloads, payloads_array: [payload_bytes]) }
+
+ it 'converts and encodes' do
+ expect(subject.to_signal_payloads('test-payload'.b)).to eq(payloads)
+ expect(codec).to have_received(:encodes)
+ end
+ end
+
+ describe '#to_query_payloads' do
+ let(:payloads) { Fabricate(:api_payloads, payloads_array: [payload_bytes]) }
+
+ it 'converts and encodes' do
+ expect(subject.to_query_payloads('test-payload'.b)).to eq(payloads)
+ expect(codec).to have_received(:encodes)
+ end
+ end
+
+ describe '#to_payload_map' do
+ let(:payload_map) { { first: payload_bytes, second: payload_nil } }
+
+ it 'converts and encodes' do
+ expect(subject.to_payload_map(first: 'test-payload'.b, second: nil)).to eq(payload_map)
+ expect(codec).to have_received(:encode).twice
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/executable_lookup_spec.rb b/spec/unit/lib/temporal/executable_lookup_spec.rb
index ec9d59a5..197dc334 100644
--- a/spec/unit/lib/temporal/executable_lookup_spec.rb
+++ b/spec/unit/lib/temporal/executable_lookup_spec.rb
@@ -1,7 +1,18 @@
require 'temporal/executable_lookup'
+require 'temporal/concerns/executable'
describe Temporal::ExecutableLookup do
- class TestClass; end
+ class TestClass
+ extend Temporal::Concerns::Executable
+ end
+
+ class MyDynamicActivity
+ extend Temporal::Concerns::Executable
+ end
+
+ class IllegalSecondDynamicActivity
+ extend Temporal::Concerns::Executable
+ end
describe '#add' do
it 'adds a class to the lookup map' do
@@ -11,6 +22,15 @@ class TestClass; end
end
end
+ describe '#add_dynamic' do
+ it 'fails on the second dynamic activity' do
+ subject.add_dynamic('MyDynamicActivity', MyDynamicActivity)
+ expect do
+ subject.add_dynamic('IllegalSecondDynamicActivity', IllegalSecondDynamicActivity)
+ end.to raise_error(Temporal::ExecutableLookup::SecondDynamicExecutableError)
+ end
+ end
+
describe '#find' do
before { subject.add('foo', TestClass) }
@@ -21,5 +41,13 @@ class TestClass; end
it 'returns nil if there were no matches' do
expect(subject.find('bar')).to eq(nil)
end
+
+ it 'falls back to the dynamic executable' do
+ subject.add('TestClass', TestClass)
+ subject.add_dynamic('MyDynamicActivity', MyDynamicActivity)
+
+ expect(subject.find('TestClass')).to eq(TestClass)
+ expect(subject.find('SomethingElse')).to eq(MyDynamicActivity)
+ end
end
end
diff --git a/spec/unit/lib/temporal/execution_options_spec.rb b/spec/unit/lib/temporal/execution_options_spec.rb
index ba4c84d1..d0c9d017 100644
--- a/spec/unit/lib/temporal/execution_options_spec.rb
+++ b/spec/unit/lib/temporal/execution_options_spec.rb
@@ -66,7 +66,8 @@ class TestExecutionOptionsWorkflow < Temporal::Workflow
{
namespace: 'test-namespace',
timeouts: { start_to_close: 10 },
- headers: { 'TestHeader' => 'Test' }
+ headers: { 'TestHeader' => 'Test' },
+ search_attributes: { 'DoubleSearchAttribute' => 3.14 },
}
end
let(:defaults) do
@@ -74,7 +75,8 @@ class TestExecutionOptionsWorkflow < Temporal::Workflow
namespace: 'default-namespace',
task_queue: 'default-task-queue',
timeouts: { schedule_to_close: 42 },
- headers: { 'DefaultHeader' => 'Default' }
+ headers: { 'DefaultHeader' => 'Default' },
+ search_attributes: { 'DefaultIntSearchAttribute' => 256 },
)
end
@@ -85,6 +87,7 @@ class TestExecutionOptionsWorkflow < Temporal::Workflow
expect(subject.retry_policy).to be_nil
expect(subject.timeouts).to eq(schedule_to_close: 42, start_to_close: 10)
expect(subject.headers).to eq('DefaultHeader' => 'Default', 'TestHeader' => 'Test')
+ expect(subject.search_attributes).to eq('DefaultIntSearchAttribute' => 256, 'DoubleSearchAttribute' => 3.14)
end
end
@@ -96,10 +99,11 @@ class TestExecutionOptionsWorkflow < Temporal::Workflow
task_queue: 'test-task-queue',
retry_policy: { interval: 1, backoff: 2, max_attempts: 5 },
timeouts: { start_to_close: 10 },
- headers: { 'TestHeader' => 'Test' }
+ headers: { 'TestHeader' => 'Test' },
+ start_delay: 10
}
end
-
+
it 'is initialized with full options' do
expect(subject.name).to eq(options[:name])
expect(subject.namespace).to eq(options[:namespace])
@@ -110,12 +114,13 @@ class TestExecutionOptionsWorkflow < Temporal::Workflow
expect(subject.retry_policy.max_attempts).to eq(options[:retry_policy][:max_attempts])
expect(subject.timeouts).to eq(options[:timeouts])
expect(subject.headers).to eq(options[:headers])
+ expect(subject.start_delay).to eq(options[:start_delay])
end
end
-
+
context 'when retry policy options are invalid' do
let(:options) { { retry_policy: { max_attempts: 10 } } }
-
+
it 'raises' do
expect { subject }.to raise_error(
Temporal::RetryPolicy::InvalidRetryPolicy,
@@ -191,7 +196,8 @@ class TestWorkflow < Temporal::Workflow
namespace: 'default-namespace',
task_queue: 'default-task-queue',
timeouts: { schedule_to_close: 42 },
- headers: { 'DefaultHeader' => 'Default', 'HeaderA' => 'DefaultA' }
+ headers: { 'DefaultHeader' => 'Default', 'HeaderA' => 'DefaultA' },
+ search_attributes: {},
)
end
diff --git a/spec/unit/lib/temporal/grpc_client_spec.rb b/spec/unit/lib/temporal/grpc_client_spec.rb
deleted file mode 100644
index bcc0458d..00000000
--- a/spec/unit/lib/temporal/grpc_client_spec.rb
+++ /dev/null
@@ -1,134 +0,0 @@
-describe Temporal::Connection::GRPC do
- subject { Temporal::Connection::GRPC.new(nil, nil, nil) }
- let(:grpc_stub) { double('grpc stub') }
- let(:namespace) { 'test-namespace' }
- let(:workflow_id) { SecureRandom.uuid }
- let(:run_id) { SecureRandom.uuid }
- let(:now) { Time.now}
-
- before do
- allow(subject).to receive(:client).and_return(grpc_stub)
-
- allow(Time).to receive(:now).and_return(now)
- end
-
- describe '#start_workflow_execution' do
- it 'provides the existing run_id when the workflow is already started' do
- allow(grpc_stub).to receive(:start_workflow_execution).and_raise(
- GRPC::AlreadyExists,
- 'Workflow execution already finished successfully. WorkflowId: TestWorkflow-1, RunId: baaf1d86-4459-4ecd-a288-47aeae55245d. Workflow Id reuse policy: allow duplicate workflow Id if last run failed.'
- )
-
- expect do
- subject.start_workflow_execution(
- namespace: namespace,
- workflow_id: workflow_id,
- workflow_name: 'Test',
- task_queue: 'test',
- execution_timeout: 0,
- run_timeout: 0,
- task_timeout: 0
- )
- end.to raise_error(Temporal::WorkflowExecutionAlreadyStartedFailure) do |e|
- expect(e.run_id).to eql('baaf1d86-4459-4ecd-a288-47aeae55245d')
- end
- end
- end
-
- describe '#get_workflow_execution_history' do
- let(:response) do
- Temporal::Api::WorkflowService::V1::GetWorkflowExecutionHistoryResponse.new(
- history: Temporal::Api::History::V1::History.new,
- next_page_token: nil
- )
- end
-
- before { allow(grpc_stub).to receive(:get_workflow_execution_history).and_return(response) }
-
- it 'calls GRPC service with supplied arguments' do
- subject.get_workflow_execution_history(
- namespace: namespace,
- workflow_id: workflow_id,
- run_id: run_id
- )
-
- expect(grpc_stub).to have_received(:get_workflow_execution_history) do |request|
- expect(request).to be_an_instance_of(Temporal::Api::WorkflowService::V1::GetWorkflowExecutionHistoryRequest)
- expect(request.namespace).to eq(namespace)
- expect(request.execution.workflow_id).to eq(workflow_id)
- expect(request.execution.run_id).to eq(run_id)
- expect(request.next_page_token).to be_empty
- expect(request.wait_new_event).to eq(false)
- expect(request.history_event_filter_type).to eq(
- Temporal::Api::Enums::V1::HistoryEventFilterType.lookup(
- Temporal::Api::Enums::V1::HistoryEventFilterType::HISTORY_EVENT_FILTER_TYPE_ALL_EVENT
- )
- )
- end
- end
-
- context 'when wait_for_new_event is true' do
- let (:timeout) { 13 }
- it 'calls GRPC service with a deadline' do
- subject.get_workflow_execution_history(
- namespace: namespace,
- workflow_id: workflow_id,
- run_id: run_id,
- wait_for_new_event: true,
- timeout: timeout
- )
-
- expect(grpc_stub).to have_received(:get_workflow_execution_history) do |request, deadline:|
- expect(request.wait_new_event).to eq(true)
- expect(deadline).to eq(now + timeout)
- end
- end
-
- it 'demands a timeout to be specified' do
- expect do
- subject.get_workflow_execution_history(
- namespace: namespace,
- workflow_id: workflow_id,
- run_id: run_id,
- wait_for_new_event: true
- )
- end.to raise_error do |e|
- expect(e.message).to eq("You must specify a timeout when wait_for_new_event = true.")
- end
- end
-
- it 'disallows a timeout larger than the server timeout' do
- expect do
- subject.get_workflow_execution_history(
- namespace: namespace,
- workflow_id: workflow_id,
- run_id: run_id,
- wait_for_new_event: true,
- timeout: 60
- )
- end.to raise_error(Temporal::ClientError) do |e|
- expect(e.message).to eq("You may not specify a timeout of more than 30 seconds, got: 60.")
- end
- end
- end
-
- context 'when event_type is :close' do
- it 'calls GRPC service' do
- subject.get_workflow_execution_history(
- namespace: namespace,
- workflow_id: workflow_id,
- run_id: run_id,
- event_type: :close
- )
-
- expect(grpc_stub).to have_received(:get_workflow_execution_history) do |request|
- expect(request.history_event_filter_type).to eq(
- Temporal::Api::Enums::V1::HistoryEventFilterType.lookup(
- Temporal::Api::Enums::V1::HistoryEventFilterType::HISTORY_EVENT_FILTER_TYPE_CLOSE_EVENT
- )
- )
- end
- end
- end
- end
-end
diff --git a/spec/unit/lib/temporal/grpc_spec.rb b/spec/unit/lib/temporal/grpc_spec.rb
new file mode 100644
index 00000000..5639a0e9
--- /dev/null
+++ b/spec/unit/lib/temporal/grpc_spec.rb
@@ -0,0 +1,980 @@
+require 'temporal/connection/grpc'
+require 'temporal/converter_wrapper'
+require 'temporal/workflow/query_result'
+
+describe Temporal::Connection::GRPC do
+ let(:identity) { 'my-identity' }
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+ let(:binary_checksum) { 'v1.0.0' }
+ let(:grpc_stub) { double('grpc stub') }
+  let(:grpc_operator_stub) { double('grpc operator stub') }
+ let(:namespace) { 'test-namespace' }
+ let(:workflow_id) { SecureRandom.uuid }
+ let(:run_id) { SecureRandom.uuid }
+  let(:now) { Time.now }
+ let(:options) { {} }
+
+ subject { Temporal::Connection::GRPC.new(nil, nil, identity, :this_channel_is_insecure, converter, options) }
+
+ before do
+ allow(subject).to receive(:client).and_return(grpc_stub)
+ allow(subject).to receive(:operator_client).and_return(grpc_operator_stub)
+
+ allow(Time).to receive(:now).and_return(now)
+ end
+
+ describe '#start_workflow_execution' do
+ it 'provides the existing run_id when the workflow is already started' do
+ allow(grpc_stub).to receive(:start_workflow_execution).and_raise(
+ GRPC::AlreadyExists,
+ 'Workflow execution already finished successfully. WorkflowId: TestWorkflow-1, RunId: baaf1d86-4459-4ecd-a288-47aeae55245d. Workflow Id reuse policy: allow duplicate workflow Id if last run failed.'
+ )
+
+ expect do
+ subject.start_workflow_execution(
+ namespace: namespace,
+ workflow_id: workflow_id,
+ workflow_name: 'Test',
+ task_queue: 'test',
+ execution_timeout: 0,
+ run_timeout: 0,
+ task_timeout: 0,
+ memo: {},
+ search_attributes: {},
+ workflow_id_reuse_policy: :allow,
+ )
+ end.to raise_error(Temporal::WorkflowExecutionAlreadyStartedFailure) do |e|
+ expect(e.run_id).to eql('baaf1d86-4459-4ecd-a288-47aeae55245d')
+ end
+ end
+
+ it 'starts a workflow with scalar arguments' do
+      allow(grpc_stub).to receive(:start_workflow_execution).and_return(Temporalio::Api::WorkflowService::V1::StartWorkflowExecutionResponse.new(run_id: 'xxx'))
+
+ datetime_attribute_value = Time.now
+ subject.start_workflow_execution(
+ namespace: namespace,
+ workflow_id: workflow_id,
+ workflow_name: 'workflow_name',
+ task_queue: 'task_queue',
+ input: ['foo'],
+ execution_timeout: 1,
+ run_timeout: 2,
+ task_timeout: 3,
+ start_delay: 10,
+ memo: {},
+ search_attributes: {
+ 'foo-int-attribute' => 256,
+ 'foo-string-attribute' => "bar",
+ 'foo-double-attribute' => 6.28,
+ 'foo-bool-attribute' => false,
+ # Temporal::Workflow::Context::Helpers.process_search_attributes will have converted
+ # any `Time` instances to strings by the time `start_workflow_execution` is called,
+ # so do the same here.
+ 'foo-datetime-attribute' => datetime_attribute_value.utc.iso8601,
+ },
+ workflow_id_reuse_policy: :reject,
+ )
+
+ expect(grpc_stub).to have_received(:start_workflow_execution) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::StartWorkflowExecutionRequest)
+ expect(request.namespace).to eq(namespace)
+ expect(request.workflow_id).to eq(workflow_id)
+ expect(request.workflow_type.name).to eq('workflow_name')
+ expect(request.task_queue.name).to eq('task_queue')
+ expect(request.input.payloads[0].data).to eq('"foo"')
+ expect(request.workflow_execution_timeout.seconds).to eq(1)
+ expect(request.workflow_run_timeout.seconds).to eq(2)
+ expect(request.workflow_task_timeout.seconds).to eq(3)
+ expect(request.workflow_start_delay.seconds).to eq(10)
+ expect(request.workflow_id_reuse_policy).to eq(:WORKFLOW_ID_REUSE_POLICY_REJECT_DUPLICATE)
+ expect(request.search_attributes.indexed_fields).to eq({
+ 'foo-int-attribute' => Temporalio::Api::Common::V1::Payload.new(data: '256', metadata: { 'encoding' => 'json/plain' }),
+ 'foo-string-attribute' => Temporalio::Api::Common::V1::Payload.new(data: '"bar"', metadata: { 'encoding' => 'json/plain' }),
+ 'foo-double-attribute' => Temporalio::Api::Common::V1::Payload.new(data: '6.28', metadata: { 'encoding' => 'json/plain' }),
+ 'foo-bool-attribute' => Temporalio::Api::Common::V1::Payload.new(data: 'false', metadata: { 'encoding' => 'json/plain' }),
+ 'foo-datetime-attribute' => Temporalio::Api::Common::V1::Payload.new(data: "\"#{datetime_attribute_value.utc.iso8601}\"", metadata: { 'encoding' => 'json/plain' }),
+ })
+ end
+ end
+
+ it 'raises when an invalid workflow_id_reuse_policy is given' do
+ expect do
+ subject.start_workflow_execution(
+ namespace: namespace,
+ workflow_id: workflow_id,
+ workflow_name: 'Test',
+ task_queue: 'test',
+ execution_timeout: 0,
+ run_timeout: 0,
+ task_timeout: 0,
+ memo: {},
+ search_attributes: {},
+ workflow_id_reuse_policy: :not_a_valid_policy
+ )
+ end.to raise_error(Temporal::Connection::ArgumentError) do |e|
+ expect(e.message).to eq('Unknown workflow_id_reuse_policy specified: not_a_valid_policy')
+ end
+ end
+ end
+
+ describe '#signal_with_start_workflow' do
+ let(:temporal_response) do
+ Temporalio::Api::WorkflowService::V1::SignalWithStartWorkflowExecutionResponse.new(run_id: 'xxx')
+ end
+
+ before { allow(grpc_stub).to receive(:signal_with_start_workflow_execution).and_return(temporal_response) }
+
+ it 'starts a workflow with a signal with scalar arguments' do
+ subject.signal_with_start_workflow_execution(
+ namespace: namespace,
+ workflow_id: workflow_id,
+ workflow_name: 'workflow_name',
+ task_queue: 'task_queue',
+ input: ['foo'],
+ execution_timeout: 1,
+ run_timeout: 2,
+ task_timeout: 3,
+ start_delay: 10,
+ workflow_id_reuse_policy: :allow,
+ signal_name: 'the question',
+ signal_input: 'what do you get if you multiply six by nine?'
+ )
+
+ expect(grpc_stub).to have_received(:signal_with_start_workflow_execution) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::SignalWithStartWorkflowExecutionRequest)
+ expect(request.namespace).to eq(namespace)
+ expect(request.workflow_id).to eq(workflow_id)
+ expect(request.workflow_type.name).to eq('workflow_name')
+ expect(request.task_queue.name).to eq('task_queue')
+ expect(request.input.payloads[0].data).to eq('"foo"')
+ expect(request.workflow_execution_timeout.seconds).to eq(1)
+ expect(request.workflow_run_timeout.seconds).to eq(2)
+ expect(request.workflow_task_timeout.seconds).to eq(3)
+ expect(request.workflow_start_delay.seconds).to eq(10)
+ expect(request.signal_name).to eq('the question')
+ expect(request.signal_input.payloads[0].data).to eq('"what do you get if you multiply six by nine?"')
+ expect(request.workflow_id_reuse_policy).to eq(:WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE)
+ end
+ end
+
+ it 'raises when an invalid workflow_id_reuse_policy is given' do
+ expect do
+ subject.signal_with_start_workflow_execution(
+ namespace: namespace,
+ workflow_id: workflow_id,
+ workflow_name: 'Test',
+ task_queue: 'test',
+ execution_timeout: 0,
+ run_timeout: 0,
+ task_timeout: 0,
+ memo: {},
+ search_attributes: {},
+ workflow_id_reuse_policy: :not_a_valid_policy,
+ signal_name: 'the question',
+ signal_input: 'what do you get if you multiply six by nine?'
+ )
+ end.to raise_error(Temporal::Connection::ArgumentError) do |e|
+ expect(e.message).to eq('Unknown workflow_id_reuse_policy specified: not_a_valid_policy')
+ end
+ end
+ end
+
+ describe "#list_namespaces" do
+ let (:response) do
+ Temporalio::Api::WorkflowService::V1::ListNamespacesResponse.new(
+ namespaces: [Temporalio::Api::WorkflowService::V1::DescribeNamespaceResponse.new],
+ next_page_token: ""
+ )
+ end
+
+ before { allow(grpc_stub).to receive(:list_namespaces).and_return(response) }
+
+ it 'calls GRPC service with supplied arguments' do
+ next_page_token = "next-page-token-id"
+
+ subject.list_namespaces(
+ page_size: 10,
+ next_page_token: next_page_token,
+ )
+
+ expect(grpc_stub).to have_received(:list_namespaces) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::ListNamespacesRequest)
+ expect(request.page_size).to eq(10)
+ expect(request.next_page_token).to eq(next_page_token)
+ end
+ end
+ end
+
+ describe '#get_workflow_execution_history' do
+ let(:response) do
+ Temporalio::Api::WorkflowService::V1::GetWorkflowExecutionHistoryResponse.new(
+ history: Temporalio::Api::History::V1::History.new,
+ next_page_token: nil
+ )
+ end
+
+ before { allow(grpc_stub).to receive(:get_workflow_execution_history).and_return(response) }
+
+ it 'calls GRPC service with supplied arguments' do
+ subject.get_workflow_execution_history(
+ namespace: namespace,
+ workflow_id: workflow_id,
+ run_id: run_id
+ )
+
+ expect(grpc_stub).to have_received(:get_workflow_execution_history) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::GetWorkflowExecutionHistoryRequest)
+ expect(request.namespace).to eq(namespace)
+ expect(request.execution.workflow_id).to eq(workflow_id)
+ expect(request.execution.run_id).to eq(run_id)
+ expect(request.next_page_token).to be_empty
+ expect(request.wait_new_event).to eq(false)
+ expect(request.history_event_filter_type).to eq(
+ Temporalio::Api::Enums::V1::HistoryEventFilterType.lookup(
+ Temporalio::Api::Enums::V1::HistoryEventFilterType::HISTORY_EVENT_FILTER_TYPE_ALL_EVENT
+ )
+ )
+ end
+ end
+
+ context 'when wait_for_new_event is true' do
+ let (:timeout) { 13 }
+ it 'calls GRPC service with a deadline' do
+ subject.get_workflow_execution_history(
+ namespace: namespace,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ wait_for_new_event: true,
+ timeout: timeout
+ )
+
+ expect(grpc_stub).to have_received(:get_workflow_execution_history).with(anything, deadline: now + timeout)
+ end
+
+ it 'demands a timeout to be specified' do
+ expect do
+ subject.get_workflow_execution_history(
+ namespace: namespace,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ wait_for_new_event: true
+ )
+ end.to raise_error do |e|
+ expect(e.message).to eq("You must specify a timeout when wait_for_new_event = true.")
+ end
+ end
+
+ it 'disallows a timeout larger than the server timeout' do
+ expect do
+ subject.get_workflow_execution_history(
+ namespace: namespace,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ wait_for_new_event: true,
+ timeout: 60
+ )
+ end.to raise_error(Temporal::ClientError) do |e|
+ expect(e.message).to eq("You may not specify a timeout of more than 30 seconds, got: 60.")
+ end
+ end
+ end
+
+ context 'when event_type is :close' do
+ it 'calls GRPC service' do
+ subject.get_workflow_execution_history(
+ namespace: namespace,
+ workflow_id: workflow_id,
+ run_id: run_id,
+ event_type: :close
+ )
+
+ expect(grpc_stub).to have_received(:get_workflow_execution_history) do |request|
+ expect(request.history_event_filter_type).to eq(
+ Temporalio::Api::Enums::V1::HistoryEventFilterType.lookup(
+ Temporalio::Api::Enums::V1::HistoryEventFilterType::HISTORY_EVENT_FILTER_TYPE_CLOSE_EVENT
+ )
+ )
+ end
+ end
+ end
+
+ describe '#list_open_workflow_executions' do
+ let(:namespace) { 'test-namespace' }
+ let(:from) { Time.now - 600 }
+ let(:to) { Time.now }
+ let(:args) { { namespace: namespace, from: from, to: to } }
+ let(:temporal_response) do
+ Temporalio::Api::WorkflowService::V1::ListOpenWorkflowExecutionsResponse.new(executions: [], next_page_token: '')
+ end
+
+ before do
+ allow(grpc_stub).to receive(:list_open_workflow_executions).and_return(temporal_response)
+ end
+
+ it 'makes an API request' do
+ subject.list_open_workflow_executions(**args)
+
+ expect(grpc_stub).to have_received(:list_open_workflow_executions) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::ListOpenWorkflowExecutionsRequest)
+ expect(request.maximum_page_size).to eq(described_class::DEFAULT_OPTIONS[:max_page_size])
+ expect(request.next_page_token).to eq('')
+ expect(request.start_time_filter).to be_an_instance_of(Temporalio::Api::Filter::V1::StartTimeFilter)
+ expect(request.start_time_filter.earliest_time.to_time)
+ .to eq(from)
+ expect(request.start_time_filter.latest_time.to_time)
+ .to eq(to)
+ expect(request.execution_filter).to eq(nil)
+ expect(request.type_filter).to eq(nil)
+ end
+ end
+
+ context 'when next_page_token is supplied' do
+ it 'makes an API request' do
+ subject.list_open_workflow_executions(**args.merge(next_page_token: 'x'))
+
+ expect(grpc_stub).to have_received(:list_open_workflow_executions) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::ListOpenWorkflowExecutionsRequest)
+ expect(request.next_page_token).to eq('x')
+ end
+ end
+ end
+
+ context 'when workflow_id is supplied' do
+ it 'makes an API request' do
+ subject.list_open_workflow_executions(**args.merge(workflow_id: 'xxx'))
+
+ expect(grpc_stub).to have_received(:list_open_workflow_executions) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::ListOpenWorkflowExecutionsRequest)
+ expect(request.execution_filter)
+ .to be_an_instance_of(Temporalio::Api::Filter::V1::WorkflowExecutionFilter)
+ expect(request.execution_filter.workflow_id).to eq('xxx')
+ end
+ end
+ end
+
+ context 'when workflow is supplied' do
+ it 'makes an API request' do
+ subject.list_open_workflow_executions(**args.merge(workflow: 'TestWorkflow'))
+
+ expect(grpc_stub).to have_received(:list_open_workflow_executions) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::ListOpenWorkflowExecutionsRequest)
+ expect(request.type_filter).to be_an_instance_of(Temporalio::Api::Filter::V1::WorkflowTypeFilter)
+ expect(request.type_filter.name).to eq('TestWorkflow')
+ end
+ end
+ end
+ end
+
+ describe "#count_workflow_executions" do
+ let(:namespace) { 'test-namespace' }
+ let(:query) { 'StartDate < 2022-04-07T20:48:20Z order by StartTime desc' }
+ let(:args) { { namespace: namespace, query: query } }
+ let(:temporal_response) do
+ Temporalio::Api::WorkflowService::V1::CountWorkflowExecutionsResponse.new(count: 0)
+ end
+
+ before do
+ allow(grpc_stub).to receive(:count_workflow_executions).and_return(temporal_response)
+ end
+
+ it 'makes an API request' do
+ subject.count_workflow_executions(**args)
+
+ expect(grpc_stub).to have_received(:count_workflow_executions) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::CountWorkflowExecutionsRequest)
+ expect(request.namespace).to eq(namespace)
+ expect(request.query).to eq(query)
+ end
+ end
+ end
+
+ describe '#list_workflow_executions' do
+ let(:namespace) { 'test-namespace' }
+ let(:query) { 'StartDate < 2022-04-07T20:48:20Z order by StartTime desc' }
+ let(:args) { { namespace: namespace, query: query } }
+ let(:temporal_response) do
+ Temporalio::Api::WorkflowService::V1::ListWorkflowExecutionsResponse.new(executions: [], next_page_token: '')
+ end
+ let(:temporal_paginated_response) do
+ Temporalio::Api::WorkflowService::V1::ListWorkflowExecutionsResponse.new(executions: [], next_page_token: 'more-results')
+ end
+
+ before do
+ allow(grpc_stub).to receive(:list_workflow_executions).and_return(temporal_response)
+ end
+
+ it 'makes an API request' do
+ subject.list_workflow_executions(**args)
+
+ expect(grpc_stub).to have_received(:list_workflow_executions) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::ListWorkflowExecutionsRequest)
+ expect(request.page_size).to eq(described_class::DEFAULT_OPTIONS[:max_page_size])
+ expect(request.next_page_token).to eq('')
+ expect(request.namespace).to eq(namespace)
+ expect(request.query).to eq(query)
+ end
+ end
+
+ context 'when next_page_token is supplied' do
+ it 'makes an API request' do
+ subject.list_workflow_executions(**args.merge(next_page_token: 'x'))
+
+ expect(grpc_stub).to have_received(:list_workflow_executions) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::ListWorkflowExecutionsRequest)
+ expect(request.next_page_token).to eq('x')
+ end
+ end
+ end
+ end
+
+ describe '#list_closed_workflow_executions' do
+ let(:namespace) { 'test-namespace' }
+ let(:from) { Time.now - 600 }
+ let(:to) { Time.now }
+ let(:args) { { namespace: namespace, from: from, to: to } }
+ let(:temporal_response) do
+ Temporalio::Api::WorkflowService::V1::ListClosedWorkflowExecutionsResponse.new(executions: [], next_page_token: '')
+ end
+
+ before do
+ allow(grpc_stub).to receive(:list_closed_workflow_executions).and_return(temporal_response)
+ end
+
+ it 'makes an API request' do
+ subject.list_closed_workflow_executions(**args)
+
+ expect(grpc_stub).to have_received(:list_closed_workflow_executions) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::ListClosedWorkflowExecutionsRequest)
+ expect(request.maximum_page_size).to eq(described_class::DEFAULT_OPTIONS[:max_page_size])
+ expect(request.next_page_token).to eq('')
+ expect(request.start_time_filter).to be_an_instance_of(Temporalio::Api::Filter::V1::StartTimeFilter)
+ expect(request.start_time_filter.earliest_time.to_time)
+ .to eq(from)
+ expect(request.start_time_filter.latest_time.to_time)
+ .to eq(to)
+ expect(request.execution_filter).to eq(nil)
+ expect(request.type_filter).to eq(nil)
+ expect(request.status_filter).to eq(nil)
+ end
+ end
+
+ context 'when next_page_token is supplied' do
+ it 'makes an API request' do
+ subject.list_closed_workflow_executions(**args.merge(next_page_token: 'x'))
+
+ expect(grpc_stub).to have_received(:list_closed_workflow_executions) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::ListClosedWorkflowExecutionsRequest)
+ expect(request.next_page_token).to eq('x')
+ end
+ end
+ end
+
+ context 'when status is supplied' do
+ let(:api_completed_status) { Temporalio::Api::Enums::V1::WorkflowExecutionStatus::WORKFLOW_EXECUTION_STATUS_COMPLETED }
+
+ it 'makes an API request' do
+ subject.list_closed_workflow_executions(
+ **args.merge(status: Temporal::Workflow::Status::COMPLETED)
+ )
+
+ expect(grpc_stub).to have_received(:list_closed_workflow_executions) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::ListClosedWorkflowExecutionsRequest)
+ expect(request.status_filter).to eq(Temporalio::Api::Filter::V1::StatusFilter.new(status: api_completed_status))
+ end
+ end
+ end
+
+ context 'when workflow_id is supplied' do
+ it 'makes an API request' do
+ subject.list_closed_workflow_executions(**args.merge(workflow_id: 'xxx'))
+
+ expect(grpc_stub).to have_received(:list_closed_workflow_executions) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::ListClosedWorkflowExecutionsRequest)
+ expect(request.execution_filter)
+ .to be_an_instance_of(Temporalio::Api::Filter::V1::WorkflowExecutionFilter)
+ expect(request.execution_filter.workflow_id).to eq('xxx')
+ end
+ end
+ end
+
+ context 'when workflow is supplied' do
+ it 'makes an API request' do
+ subject.list_closed_workflow_executions(**args.merge(workflow: 'TestWorkflow'))
+
+ expect(grpc_stub).to have_received(:list_closed_workflow_executions) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::ListClosedWorkflowExecutionsRequest)
+ expect(request.type_filter).to be_an_instance_of(Temporalio::Api::Filter::V1::WorkflowTypeFilter)
+ expect(request.type_filter.name).to eq('TestWorkflow')
+ end
+ end
+ end
+ end
+ end
+
+ describe '#respond_query_task_completed' do
+ let(:task_token) { SecureRandom.uuid }
+
+ before do
+ allow(grpc_stub)
+ .to receive(:respond_query_task_completed)
+ .and_return(Temporalio::Api::WorkflowService::V1::RespondQueryTaskCompletedResponse.new)
+ end
+
+ context 'when query result is an answer' do
+ let(:query_result) { Temporal::Workflow::QueryResult.answer(42) }
+
+ it 'makes an API request' do
+ subject.respond_query_task_completed(
+ namespace: namespace,
+ task_token: task_token,
+ query_result: query_result
+ )
+
+ expect(grpc_stub).to have_received(:respond_query_task_completed) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::RespondQueryTaskCompletedRequest)
+ expect(request.task_token).to eq(task_token)
+ expect(request.namespace).to eq(namespace)
+ expect(request.completed_type).to eq(Temporalio::Api::Enums::V1::QueryResultType.lookup(
+ Temporalio::Api::Enums::V1::QueryResultType::QUERY_RESULT_TYPE_ANSWERED)
+ )
+ expect(request.query_result).to eq(converter.to_query_payloads(42))
+ expect(request.error_message).to eq('')
+ end
+ end
+ end
+
+ context 'when query result is a failure' do
+ let(:query_result) { Temporal::Workflow::QueryResult.failure(StandardError.new('Test query failure')) }
+
+ it 'makes an API request' do
+ subject.respond_query_task_completed(
+ namespace: namespace,
+ task_token: task_token,
+ query_result: query_result
+ )
+
+ expect(grpc_stub).to have_received(:respond_query_task_completed) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::RespondQueryTaskCompletedRequest)
+ expect(request.task_token).to eq(task_token)
+ expect(request.namespace).to eq(namespace)
+ expect(request.completed_type).to eq(Temporalio::Api::Enums::V1::QueryResultType.lookup(
+ Temporalio::Api::Enums::V1::QueryResultType::QUERY_RESULT_TYPE_FAILED)
+ )
+ expect(request.query_result).to eq(nil)
+ expect(request.error_message).to eq('Test query failure')
+ end
+ end
+ end
+ end
+
+ describe '#respond_workflow_task_completed' do
+ let(:task_token) { SecureRandom.uuid }
+
+ before do
+ allow(grpc_stub)
+ .to receive(:respond_workflow_task_completed)
+ .and_return(Temporalio::Api::WorkflowService::V1::RespondWorkflowTaskCompletedResponse.new)
+ end
+
+ context 'when responding with query results' do
+ let(:query_results) do
+ {
+ '1' => Temporal::Workflow::QueryResult.answer(42),
+ '2' => Temporal::Workflow::QueryResult.failure(StandardError.new('Test query failure')),
+ }
+ end
+
+ it 'makes an API request' do
+ subject.respond_workflow_task_completed(
+ namespace: namespace,
+ task_token: task_token,
+ commands: [],
+ query_results: query_results,
+ binary_checksum: binary_checksum,
+ new_sdk_flags_used: [1]
+ )
+
+ expect(grpc_stub).to have_received(:respond_workflow_task_completed) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::RespondWorkflowTaskCompletedRequest)
+ expect(request.task_token).to eq(task_token)
+ expect(request.namespace).to eq(namespace)
+ expect(request.commands).to be_empty
+ expect(request.identity).to eq(identity)
+ expect(request.binary_checksum).to eq(binary_checksum)
+
+ expect(request.query_results.length).to eq(2)
+
+ expect(request.query_results['1']).to be_a(Temporalio::Api::Query::V1::WorkflowQueryResult)
+ expect(request.query_results['1'].result_type).to eq(Temporalio::Api::Enums::V1::QueryResultType.lookup(
+ Temporalio::Api::Enums::V1::QueryResultType::QUERY_RESULT_TYPE_ANSWERED)
+ )
+ expect(request.query_results['1'].answer).to eq(converter.to_query_payloads(42))
+
+ expect(request.query_results['2']).to be_a(Temporalio::Api::Query::V1::WorkflowQueryResult)
+ expect(request.query_results['2'].result_type).to eq(Temporalio::Api::Enums::V1::QueryResultType.lookup(
+ Temporalio::Api::Enums::V1::QueryResultType::QUERY_RESULT_TYPE_FAILED)
+ )
+ expect(request.query_results['2'].error_message).to eq('Test query failure')
+
+ expect(request.sdk_metadata.lang_used_flags).to eq([1])
+ end
+ end
+ end
+ end
+
+ describe '#respond_workflow_task_failed' do
+ let(:task_token) { 'task-token' }
+ let(:cause) { Temporalio::Api::Enums::V1::WorkflowTaskFailedCause::WORKFLOW_TASK_FAILED_CAUSE_UNHANDLED_COMMAND }
+
+ before { allow(grpc_stub).to receive(:respond_workflow_task_failed) }
+
+ it 'calls GRPC service with supplied arguments' do
+ subject.respond_workflow_task_failed(
+ namespace: namespace,
+ task_token: task_token,
+ cause: cause,
+ exception: Exception.new('something went wrong'),
+ binary_checksum: binary_checksum
+ )
+
+ expect(grpc_stub).to have_received(:respond_workflow_task_failed) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::RespondWorkflowTaskFailedRequest)
+ expect(request.namespace).to eq(namespace)
+ expect(request.task_token).to eq(task_token)
+ expect(request.cause).to be(Temporalio::Api::Enums::V1::WorkflowTaskFailedCause.lookup(cause))
+ expect(request.identity).to eq(identity)
+ expect(request.binary_checksum).to eq(binary_checksum)
+ end
+ end
+ end
+
+ describe '#poll_activity_task_queue' do
+ let(:task_queue) { 'test-task-queue' }
+ let(:temporal_response) do
+ Temporalio::Api::WorkflowService::V1::PollActivityTaskQueueResponse.new
+ end
+ let(:poll_request) do
+ instance_double(
+ "GRPC::ActiveCall::Operation",
+ execute: temporal_response
+ )
+ end
+
+ before do
+ allow(grpc_stub).to receive(:poll_activity_task_queue).with(anything, return_op: true).and_return(poll_request)
+ end
+
+ it 'makes an API request' do
+ subject.poll_activity_task_queue(namespace: namespace, task_queue: task_queue)
+
+ expect(grpc_stub).to have_received(:poll_activity_task_queue) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::PollActivityTaskQueueRequest)
+ expect(request.namespace).to eq(namespace)
+ expect(request.task_queue.name).to eq(task_queue)
+ expect(request.identity).to eq(identity)
+ expect(request.task_queue_metadata).to be_nil
+ end
+ end
+
+ it 'makes an API request with max_tasks_per_second in the metadata' do
+ subject.poll_activity_task_queue(namespace: namespace, task_queue: task_queue, max_tasks_per_second: 10)
+
+ expect(grpc_stub).to have_received(:poll_activity_task_queue) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::WorkflowService::V1::PollActivityTaskQueueRequest)
+ expect(request.namespace).to eq(namespace)
+ expect(request.task_queue.name).to eq(task_queue)
+ expect(request.identity).to eq(identity)
+ expect(request.task_queue_metadata).to_not be_nil
+ expect(request.task_queue_metadata.max_tasks_per_second).to_not be_nil
+ expect(request.task_queue_metadata.max_tasks_per_second.value).to eq(10)
+ end
+ end
+ end
+
+ describe '#add_custom_search_attributes' do
+ it 'calls GRPC service with supplied arguments' do
+ allow(grpc_operator_stub).to receive(:add_search_attributes)
+ subject.add_custom_search_attributes(
+ {
+ 'SomeTextField' => :text,
+ 'SomeKeywordField' => :keyword,
+ 'SomeIntField' => :int,
+ 'SomeDoubleField' => :double,
+ 'SomeBoolField' => :bool,
+ 'SomeDatetimeField' => :datetime,
+ 'SomeKeywordListField' => :keyword_list
+ },
+ namespace
+ )
+
+ expect(grpc_operator_stub).to have_received(:add_search_attributes) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::OperatorService::V1::AddSearchAttributesRequest)
+ expect(request.search_attributes).to eq(
+ {
+ 'SomeTextField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_TEXT,
+ 'SomeKeywordField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_KEYWORD,
+ 'SomeIntField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_INT,
+ 'SomeDoubleField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_DOUBLE,
+ 'SomeBoolField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_BOOL,
+ 'SomeDatetimeField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_DATETIME,
+ 'SomeKeywordListField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_KEYWORD_LIST
+ }
+ )
+ expect(request.namespace).to eq(namespace)
+ end
+ end
+
+ it 'attribute already exists' do
+ allow(grpc_operator_stub).to receive(:add_search_attributes).and_raise(GRPC::AlreadyExists.new(''))
+ expect do
+ subject.add_custom_search_attributes(
+ {
+ 'SomeTextField' => :text
+ },
+ namespace
+ )
+ end.to raise_error(Temporal::SearchAttributeAlreadyExistsFailure)
+ end
+
+ it 'failed to add attribute' do
+ allow(grpc_operator_stub).to receive(:add_search_attributes).and_raise(GRPC::Internal.new(''))
+ expect do
+ subject.add_custom_search_attributes(
+ {
+ 'SomeTextField' => :text
+ },
+ namespace
+ )
+ end.to raise_error(Temporal::SearchAttributeFailure)
+ end
+
+ it 'attributes can be symbols' do
+ allow(grpc_operator_stub).to receive(:add_search_attributes)
+ subject.add_custom_search_attributes(
+ {
+ SomeTextField: :text
+ },
+ namespace
+ )
+
+ expect(grpc_operator_stub).to have_received(:add_search_attributes) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::OperatorService::V1::AddSearchAttributesRequest)
+ expect(request.search_attributes).to eq(
+ {
+ 'SomeTextField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_TEXT
+ }
+ )
+ expect(request.namespace).to eq(namespace)
+ end
+ end
+
+ it 'invalid attribute type' do
+ expect do
+ subject.add_custom_search_attributes(
+ {
+ 'SomeBadField' => :foo
+ },
+ namespace
+ )
+ end.to raise_error(Temporal::InvalidSearchAttributeTypeFailure) do |e|
+ expect(e.to_s).to eq('Cannot add search attributes ({"SomeBadField"=>:foo}): unknown search attribute type :foo, supported types: [:text, :keyword, :int, :double, :bool, :datetime, :keyword_list]')
+ end
+ end
+ end
+
+ describe '#list_custom_search_attributes' do
+ it 'calls GRPC service with supplied arguments' do
+ allow(grpc_operator_stub).to receive(:list_search_attributes).and_return(
+ Temporalio::Api::OperatorService::V1::ListSearchAttributesResponse.new(
+ custom_attributes: {
+ 'SomeTextField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_TEXT,
+ 'SomeKeywordField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_KEYWORD,
+ 'SomeIntField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_INT,
+ 'SomeDoubleField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_DOUBLE,
+ 'SomeBoolField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_BOOL,
+ 'SomeDatetimeField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_DATETIME,
+ 'SomeKeywordListField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_KEYWORD_LIST
+ }
+ )
+ )
+
+ response = subject.list_custom_search_attributes(namespace)
+
+ expect(response).to eq(
+ {
+ 'SomeTextField' => :text,
+ 'SomeKeywordField' => :keyword,
+ 'SomeIntField' => :int,
+ 'SomeDoubleField' => :double,
+ 'SomeBoolField' => :bool,
+ 'SomeDatetimeField' => :datetime,
+ 'SomeKeywordListField' => :keyword_list
+ }
+ )
+
+ expect(grpc_operator_stub).to have_received(:list_search_attributes) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::OperatorService::V1::ListSearchAttributesRequest)
+ expect(request.namespace).to eq(namespace)
+ end
+ end
+
+ it 'unknown attribute type becomes nil' do
+ allow(grpc_operator_stub).to receive(:list_search_attributes).and_return(
+ Temporalio::Api::OperatorService::V1::ListSearchAttributesResponse.new(
+ custom_attributes: {
+ 'SomeTextField' => Temporalio::Api::Enums::V1::IndexedValueType::INDEXED_VALUE_TYPE_TEXT,
+ 'SomeUnknownField' => 100 # simulate some new type being added in the proto in the future
+ }
+ )
+ )
+
+ response = subject.list_custom_search_attributes(namespace)
+
+ expect(response).to eq(
+ {
+ 'SomeTextField' => :text,
+ 'SomeUnknownField' => nil
+ }
+ )
+ end
+ end
+
+ describe '#remove_custom_search_attributes' do
+ it 'calls GRPC service with supplied arguments' do
+ allow(grpc_operator_stub).to receive(:remove_search_attributes)
+
+ attributes = ['SomeTextField', 'SomeIntField']
+
+ subject.remove_custom_search_attributes(attributes, namespace)
+
+ expect(grpc_operator_stub).to have_received(:remove_search_attributes) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::OperatorService::V1::RemoveSearchAttributesRequest)
+ expect(request.search_attributes).to eq(attributes)
+ expect(request.namespace).to eq(namespace)
+ end
+ end
+
+ it 'cannot remove non-existent attribute' do
+ allow(grpc_operator_stub).to receive(:remove_search_attributes).and_raise(GRPC::NotFound.new)
+
+ attributes = ['SomeTextField', 'SomeIntField']
+
+ expect do
+ subject.remove_custom_search_attributes(attributes, namespace)
+ end.to raise_error(Temporal::NotFoundFailure)
+ end
+
+ it 'attribute names can be symbols' do
+ allow(grpc_operator_stub).to receive(:remove_search_attributes)
+
+ subject.remove_custom_search_attributes(%i[SomeTextField SomeIntField], namespace)
+
+ expect(grpc_operator_stub).to have_received(:remove_search_attributes) do |request|
+ expect(request).to be_an_instance_of(Temporalio::Api::OperatorService::V1::RemoveSearchAttributesRequest)
+ expect(request.search_attributes).to eq(%w[SomeTextField SomeIntField])
+ end
+ end
+ end
+
+ describe "passing in options" do
+ before do
+ allow(subject).to receive(:client).and_call_original
+ end
+
+ context "when keepalive_time_ms is passed" do
+ let(:options) { { keepalive_time_ms: 30_000 } }
+
+ it "passes the option to the channel args" do
+ expect(Temporalio::Api::WorkflowService::V1::WorkflowService::Stub).to receive(:new).with(
+ ":",
+ :this_channel_is_insecure,
+ timeout: 60,
+ interceptors: [instance_of(Temporal::Connection::ClientNameVersionInterceptor)],
+ channel_args: {
+ "grpc.keepalive_time_ms" => 30_000
+ }
+ )
+ subject.send(:client)
+ end
+ end
+
+ context "when passing retry_connection" do
+ let(:options) { { retry_connection: true } }
+
+ it "passes the option to the channel args" do
+ expect(Temporalio::Api::WorkflowService::V1::WorkflowService::Stub).to receive(:new).with(
+ ":",
+ :this_channel_is_insecure,
+ timeout: 60,
+ interceptors: [instance_of(Temporal::Connection::ClientNameVersionInterceptor)],
+ channel_args: {
+ "grpc.enable_retries" => 1,
+ "grpc.service_config" => {
+ methodConfig: [
+ {
+ name: [
+ {
+ service: "temporal.api.workflowservice.v1.WorkflowService",
+ }
+ ],
+ retryPolicy: {
+ retryableStatusCodes: ["UNAVAILABLE"],
+ maxAttempts: 3,
+ initialBackoff: "0.1s",
+ backoffMultiplier: 2.0,
+ maxBackoff: "0.3s"
+ }
+ }
+ ]
+ }.to_json
+ }
+ )
+ subject.send(:client)
+ end
+ end
+
+ context "when passing a custom retry policy" do
+ let(:options) { { retry_policy: retry_policy } }
+ let(:retry_policy) do
+ {
+ retryableStatusCodes: ["UNAVAILABLE", "INTERNAL"],
+ maxAttempts: 1,
+ initialBackoff: "0.2s",
+ backoffMultiplier: 1.0,
+ maxBackoff: "0.5s"
+ }
+ end
+
+ it "passes the policy to the channel args" do
+ expect(Temporalio::Api::WorkflowService::V1::WorkflowService::Stub).to receive(:new).with(
+ ":",
+ :this_channel_is_insecure,
+ timeout: 60,
+ interceptors: [instance_of(Temporal::Connection::ClientNameVersionInterceptor)],
+ channel_args: {
+ "grpc.enable_retries" => 1,
+ "grpc.service_config" => {
+ methodConfig: [
+ {
+ name: [
+ {
+ service: "temporal.api.workflowservice.v1.WorkflowService",
+ }
+ ],
+ retryPolicy: retry_policy
+ }
+ ]
+ }.to_json
+ }
+ )
+ subject.send(:client)
+ end
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/metadata/activity_spec.rb b/spec/unit/lib/temporal/metadata/activity_spec.rb
index 4df742be..db0f8e78 100644
--- a/spec/unit/lib/temporal/metadata/activity_spec.rb
+++ b/spec/unit/lib/temporal/metadata/activity_spec.rb
@@ -16,6 +16,8 @@
expect(subject.workflow_name).to eq(args.workflow_name)
expect(subject.headers).to eq(args.headers)
expect(subject.heartbeat_details).to eq(args.heartbeat_details)
+ expect(subject.scheduled_at).to eq(args.scheduled_at)
+ expect(subject.current_attempt_scheduled_at).to eq(args.current_attempt_scheduled_at)
end
it { is_expected.to be_frozen }
@@ -36,7 +38,9 @@
'namespace' => subject.namespace,
'workflow_id' => subject.workflow_id,
'workflow_name' => subject.workflow_name,
- 'workflow_run_id' => subject.workflow_run_id
+ 'workflow_run_id' => subject.workflow_run_id,
+ 'scheduled_at' => subject.scheduled_at.to_s,
+ 'current_attempt_scheduled_at' => subject.current_attempt_scheduled_at.to_s
})
end
end
diff --git a/spec/unit/lib/temporal/metadata/workflow_spec.rb b/spec/unit/lib/temporal/metadata/workflow_spec.rb
index 6bbe3af6..0ad4af48 100644
--- a/spec/unit/lib/temporal/metadata/workflow_spec.rb
+++ b/spec/unit/lib/temporal/metadata/workflow_spec.rb
@@ -6,6 +6,8 @@
let(:args) { Fabricate(:workflow_metadata) }
it 'sets the attributes' do
+ expect(subject.namespace).to eq(args.namespace)
+ expect(subject.id).to eq(args.id)
expect(subject.name).to eq(args.name)
expect(subject.run_id).to eq(args.run_id)
expect(subject.attempt).to eq(args.attempt)
@@ -25,9 +27,16 @@
it 'returns a hash' do
expect(subject.to_h).to eq({
+ 'namespace' => subject.namespace,
+ 'workflow_id' => subject.id,
'attempt' => subject.attempt,
'workflow_name' => subject.name,
- 'workflow_run_id' => subject.run_id
+ 'workflow_run_id' => subject.run_id,
+ 'parent_workflow_id' => nil,
+ 'parent_workflow_run_id' => nil,
+ 'task_queue' => subject.task_queue,
+ 'run_started_at' => subject.run_started_at.to_f,
+ 'memo' => subject.memo,
})
end
end
diff --git a/spec/unit/lib/temporal/metadata/workflow_task_spec.rb b/spec/unit/lib/temporal/metadata/workflow_task_spec.rb
index 7259c1f9..99f95a84 100644
--- a/spec/unit/lib/temporal/metadata/workflow_task_spec.rb
+++ b/spec/unit/lib/temporal/metadata/workflow_task_spec.rb
@@ -32,7 +32,7 @@
'namespace' => subject.namespace,
'workflow_id' => subject.workflow_id,
'workflow_name' => subject.workflow_name,
- 'workflow_run_id' => subject.workflow_run_id,
+ 'workflow_run_id' => subject.workflow_run_id
})
end
end
diff --git a/spec/unit/lib/temporal/metadata_spec.rb b/spec/unit/lib/temporal/metadata_spec.rb
index 18134e26..b3f02955 100644
--- a/spec/unit/lib/temporal/metadata_spec.rb
+++ b/spec/unit/lib/temporal/metadata_spec.rb
@@ -1,80 +1,83 @@
require 'temporal/metadata'
describe Temporal::Metadata do
- describe '.generate' do
- subject { described_class.generate(type, data, namespace) }
-
- context 'with activity type' do
- let(:type) { described_class::ACTIVITY_TYPE }
- let(:data) { Fabricate(:api_activity_task) }
- let(:namespace) { 'test-namespace' }
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
- it 'generates metadata' do
- expect(subject.namespace).to eq(namespace)
- expect(subject.id).to eq(data.activity_id)
- expect(subject.name).to eq(data.activity_type.name)
- expect(subject.task_token).to eq(data.task_token)
- expect(subject.attempt).to eq(data.attempt)
- expect(subject.workflow_run_id).to eq(data.workflow_execution.run_id)
- expect(subject.workflow_id).to eq(data.workflow_execution.workflow_id)
- expect(subject.workflow_name).to eq(data.workflow_type.name)
- expect(subject.headers).to eq({})
- end
+ describe '.generate_activity_metadata' do
+ subject { described_class.generate_activity_metadata(data, namespace, converter) }
- context 'with headers' do
- let(:data) { Fabricate(:api_activity_task, headers: { 'Foo' => 'Bar' }) }
+ let(:data) { Fabricate(:api_activity_task) }
+ let(:namespace) { 'test-namespace' }
- it 'assigns headers' do
- expect(subject.headers).to eq('Foo' => 'Bar')
- end
- end
+ it 'generates metadata' do
+ expect(subject.namespace).to eq(namespace)
+ expect(subject.id).to eq(data.activity_id)
+ expect(subject.name).to eq(data.activity_type.name)
+ expect(subject.task_token).to eq(data.task_token)
+ expect(subject.attempt).to eq(data.attempt)
+ expect(subject.workflow_run_id).to eq(data.workflow_execution.run_id)
+ expect(subject.workflow_id).to eq(data.workflow_execution.workflow_id)
+ expect(subject.workflow_name).to eq(data.workflow_type.name)
+ expect(subject.headers).to eq({})
end
- context 'with workflow task type' do
- let(:type) { described_class::WORKFLOW_TASK_TYPE }
- let(:data) { Fabricate(:api_workflow_task) }
- let(:namespace) { 'test-namespace' }
+ context 'with headers' do
+ let(:data) { Fabricate(:api_activity_task, headers: { 'Foo' => 'Bar' }) }
- it 'generates metadata' do
- expect(subject.namespace).to eq(namespace)
- expect(subject.id).to eq(data.started_event_id)
- expect(subject.task_token).to eq(data.task_token)
- expect(subject.attempt).to eq(data.attempt)
- expect(subject.workflow_run_id).to eq(data.workflow_execution.run_id)
- expect(subject.workflow_id).to eq(data.workflow_execution.workflow_id)
- expect(subject.workflow_name).to eq(data.workflow_type.name)
+ it 'assigns headers' do
+ expect(subject.headers).to eq('Foo' => 'Bar')
end
end
+ end
- context 'with workflow type' do
- let(:type) { described_class::WORKFLOW_TYPE }
- let(:data) { Fabricate(:api_workflow_execution_started_event_attributes) }
- let(:namespace) { nil }
+ describe '.generate_workflow_task_metadata' do
+ subject { described_class.generate_workflow_task_metadata(data, namespace) }
- it 'generates metadata' do
- expect(subject.run_id).to eq(data.original_execution_run_id)
- expect(subject.attempt).to eq(data.attempt)
- expect(subject.headers).to eq({})
- end
+ let(:data) { Fabricate(:api_workflow_task) }
+ let(:namespace) { 'test-namespace' }
- context 'with headers' do
- let(:data) do
- Fabricate(:api_workflow_execution_started_event_attributes, headers: { 'Foo' => 'Bar' })
- end
+ it 'generates metadata' do
+ expect(subject.namespace).to eq(namespace)
+ expect(subject.id).to eq(data.started_event_id)
+ expect(subject.task_token).to eq(data.task_token)
+ expect(subject.attempt).to eq(data.attempt)
+ expect(subject.workflow_run_id).to eq(data.workflow_execution.run_id)
+ expect(subject.workflow_id).to eq(data.workflow_execution.workflow_id)
+ expect(subject.workflow_name).to eq(data.workflow_type.name)
+ end
+ end
- it 'assigns headers' do
- expect(subject.headers).to eq('Foo' => 'Bar')
- end
- end
+ context '.generate_workflow_metadata' do
+ subject { described_class.generate_workflow_metadata(event, task_metadata, converter) }
+ let(:event) { Temporal::Workflow::History::Event.new(Fabricate(:api_workflow_execution_started_event)) }
+ let(:task_metadata) { Fabricate(:workflow_task_metadata) }
+ let(:namespace) { nil }
+
+ it 'generates metadata' do
+ expect(subject.run_id).to eq(event.attributes.original_execution_run_id)
+ expect(subject.id).to eq(task_metadata.workflow_id)
+ expect(subject.attempt).to eq(event.attributes.attempt)
+ expect(subject.headers).to eq({})
+ expect(subject.memo).to eq({})
+ expect(subject.namespace).to eq(task_metadata.namespace)
+ expect(subject.task_queue).to eq(event.attributes.task_queue.name)
+ expect(subject.run_started_at).to eq(event.timestamp)
end
- context 'with unknown type' do
- let(:type) { :unknown }
- let(:data) { nil }
- let(:namespace) { nil }
+ context 'with headers' do
+ let(:event) do
+ Temporal::Workflow::History::Event.new(
+ Fabricate(:api_workflow_execution_started_event, headers: { 'Foo' => 'Bar' })
+ )
+ end
- it 'raises' do
- expect { subject }.to raise_error(Temporal::InternalError, 'Unsupported metadata type')
+ it 'assigns headers' do
+ expect(subject.headers).to eq('Foo' => 'Bar')
end
end
end
diff --git a/spec/unit/lib/temporal/middleware/header_propagation_chain.rb b/spec/unit/lib/temporal/middleware/header_propagation_chain.rb
new file mode 100644
index 00000000..c7f8c522
--- /dev/null
+++ b/spec/unit/lib/temporal/middleware/header_propagation_chain.rb
@@ -0,0 +1,51 @@
+require 'temporal/middleware/header_propagator_chain'
+require 'temporal/middleware/entry'
+
+describe Temporal::Middleware::HeaderPropagatorChain do
+ class TestHeaderPropagator
+ attr_reader :id
+
+ def initialize(id)
+ @id = id
+ end
+
+ def inject!(header)
+ header['first'] = id unless header.has_key? 'first'
+ header[id] = id
+ end
+ end
+
+ describe '#inject' do
+ subject { described_class.new(propagators) }
+ let(:headers) { { 'test' => 'header' } }
+
+ context 'with propagators' do
+ let(:propagators) do
+ [
+ propagator_1,
+ propagator_2,
+ ]
+ end
+ let(:propagator_1) { Temporal::Middleware::Entry.new(TestHeaderPropagator, '1') }
+ let(:propagator_2) { Temporal::Middleware::Entry.new(TestHeaderPropagator, '2') }
+
+ it 'calls each propagator in order' do
+ expected = {
+ 'test' => 'header',
+ 'first' => '1',
+ '1' => '1',
+ '2' => '2',
+ }
+ expect(subject.inject(headers)).to eq(expected)
+ end
+ end
+
+ context 'without propagators' do
+ let(:propagators) { [] }
+
+ it 'returns the result of the passed block' do
+ expect(subject.inject(headers)).to eq(headers)
+ end
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/scheduled_thread_pool_spec.rb b/spec/unit/lib/temporal/scheduled_thread_pool_spec.rb
new file mode 100644
index 00000000..56fab272
--- /dev/null
+++ b/spec/unit/lib/temporal/scheduled_thread_pool_spec.rb
@@ -0,0 +1,160 @@
+require 'temporal/scheduled_thread_pool'
+
+describe Temporal::ScheduledThreadPool do
+ before do
+ allow(Temporal.metrics).to receive(:gauge)
+ end
+
+ let(:config) { Temporal::Configuration.new }
+ let(:size) { 2 }
+ let(:tags) { { foo: 'bar', bat: 'baz' } }
+ let(:thread_pool) { described_class.new(size, config, tags) }
+
+ describe '#schedule' do
+ it 'executes one task with zero delay on a thread and exits' do
+ times = 0
+
+ thread_pool.schedule(:foo, 0) do
+ times += 1
+ end
+
+ thread_pool.shutdown
+
+ expect(times).to eq(1)
+ end
+
+ it 'executes tasks with delays in time order' do
+ answers = Queue.new
+
+ thread_pool.schedule(:second, 0.2) do
+ answers << :second
+ end
+
+ thread_pool.schedule(:first, 0.1) do
+ answers << :first
+ end
+
+ thread_pool.shutdown
+
+ expect(answers.size).to eq(2)
+ expect(answers.pop).to eq(:first)
+ expect(answers.pop).to eq(:second)
+ end
+
+ it 'error does not exit' do
+ times = 0
+
+ thread_pool.schedule(:foo, 0) do
+ times += 1
+ raise 'foo'
+ end
+
+ thread_pool.shutdown
+
+ expect(times).to eq(1)
+ end
+
+ it 'exception does exit' do
+ Thread.report_on_exception = false
+ times = 0
+
+ thread_pool.schedule(:foo, 0) do
+ times += 1
+ raise Exception, 'crash'
+ end
+
+ begin
+ thread_pool.shutdown
+ raise 'should not be reached'
+ rescue Exception => e
+ 'ok'
+ end
+
+ expect(times).to eq(1)
+ end
+ end
+
+ describe '#cancel' do
+ it 'cancels already waiting task' do
+ answers = Queue.new
+ handles = []
+
+ handles << thread_pool.schedule(:foo, 30) do
+ answers << :foo
+ end
+
+ handles << thread_pool.schedule(:bar, 30) do
+ answers << :bar
+ end
+
+ # Even though this has no wait, it will be blocked by the above
+ # two long running tasks until one is finished or cancels.
+ handles << thread_pool.schedule(:baz, 0) do
+ answers << :baz
+ end
+
+ # Canceling one waiting item (foo) will let a blocked one (baz) through
+ thread_pool.cancel(handles[0])
+
+ # Canceling the other waiting item (bar) will prevent it from blocking
+ # on shutdown
+ thread_pool.cancel(handles[1])
+
+ thread_pool.shutdown
+
+ expect(answers.size).to eq(1)
+ expect(answers.pop).to eq(:baz)
+ end
+
+ it 'cancels blocked task' do
+ times = 0
+ handles = []
+
+ handles << thread_pool.schedule(:foo, 30) do
+ times += 1
+ end
+
+ handles << thread_pool.schedule(:bar, 30) do
+ times += 1
+ end
+
+ # Even though this has no wait, it will be blocked by the above
+ # two long running tasks. This test ensures it can be canceled
+ # even while waiting to run.
+ handles << thread_pool.schedule(:baz, 0) do
+ times += 1
+ end
+
+ # Cancel this one before it can start running
+ thread_pool.cancel(handles[0])
+
+ # Cancel the others so that they don't block shutdown
+ thread_pool.cancel(handles[1])
+ thread_pool.cancel(handles[2])
+
+ thread_pool.shutdown
+
+ expect(times).to eq(0)
+ end
+ end
+
+ describe '#new' do
+ it 'reports thread available metrics' do
+ thread_pool.schedule(:foo, 0) do
+ end
+
+ thread_pool.shutdown
+
+ # Thread behavior is not deterministic. Ensure the calls match without
+ # verifying exact gauge values.
+ expect(Temporal.metrics)
+ .to have_received(:gauge)
+ .with(
+ Temporal::MetricKeys::THREAD_POOL_AVAILABLE_THREADS,
+ instance_of(Integer),
+ tags
+ )
+ .at_least(:once)
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/testing/local_workflow_context_spec.rb b/spec/unit/lib/temporal/testing/local_workflow_context_spec.rb
index 29b4b2a3..75600fb3 100644
--- a/spec/unit/lib/temporal/testing/local_workflow_context_spec.rb
+++ b/spec/unit/lib/temporal/testing/local_workflow_context_spec.rb
@@ -1,24 +1,41 @@
require 'temporal/testing'
require 'temporal/workflow'
require 'temporal/api/errordetails/v1/message_pb'
+require 'time'
describe Temporal::Testing::LocalWorkflowContext do
let(:workflow_id) { 'workflow_id_1' }
+ let(:workflow_name) { 'HelloWorldWorkflow' }
let(:run_id) { 'run_id_1' }
let(:execution) { Temporal::Testing::WorkflowExecution.new }
+ let(:task_queue) { 'my_test_queue' }
+ let(:config) { Temporal::Configuration.new }
let(:workflow_context) do
Temporal::Testing::LocalWorkflowContext.new(
execution,
workflow_id,
run_id,
[],
- Temporal::Metadata::Workflow.new(name: workflow_id, run_id: run_id, attempt: 1)
+ Temporal::Metadata::Workflow.new(
+ namespace: 'ruby-samples',
+ id: workflow_id,
+ name: workflow_name,
+ run_id: run_id,
+ parent_id: nil,
+ parent_run_id: nil,
+ attempt: 1,
+ task_queue: task_queue,
+ headers: {},
+ run_started_at: Time.now,
+ memo: {},
+ ),
+ config
)
end
let(:async_token) do
# Generate the async token
Temporal::Activity::AsyncToken.encode(
- Temporal.configuration.namespace,
+ config.namespace,
1, # activity ID starts at 1 for each workflow
workflow_id,
run_id
@@ -49,6 +66,14 @@ def execute
end
end
+ class MetadataCapturingActivity < Temporal::Activity
+ def execute
+ # activity.metadata is private, which we work around in order to write unit tests that
+ # can observe activity metadata
+ activity.send :metadata
+ end
+ end
+
describe '#execute_activity' do
describe 'outcome is captured in the future' do
it 'delay failure' do
@@ -120,10 +145,112 @@ def execute
result = workflow_context.execute_activity!(TestActivity)
expect(result).to eq('ok')
end
+
+ it 'can heartbeat' do
+ # Heartbeat doesn't do anything in local mode, but at least it can be called.
+ workflow_context.execute_activity!(TestHeartbeatingActivity)
+ end
+
+ it 'has accurate metadata' do
+ result = workflow_context.execute_activity!(MetadataCapturingActivity)
+ expect(result.attempt).to eq(1)
+ expect(result.headers).to eq({})
+ expect(result.id).to eq(1)
+ expect(result.name).to eq('MetadataCapturingActivity')
+ expect(result.namespace).to eq('default-namespace')
+ expect(result.workflow_id).to eq(workflow_id)
+ expect(result.workflow_name).to eq(workflow_name)
+ expect(result.workflow_run_id).to eq(run_id)
+ end
end
- it 'can heartbeat' do
- # Heartbeat doesn't do anything in local mode, but at least it can be called.
- workflow_context.execute_activity!(TestHeartbeatingActivity)
+ describe '#wait_for' do
+ it 'await unblocks once condition changes' do
+ can_continue = false
+ exited = false
+ fiber = Fiber.new do
+ workflow_context.wait_until do
+ can_continue
+ end
+
+ exited = true
+ end
+
+ fiber.resume # start running
+ expect(exited).to eq(false)
+
+ can_continue = true # change condition
+ fiber.resume # resume running after the Fiber.yield done in context.await
+ expect(exited).to eq(true)
+ end
+
+ it 'condition or future unblocks' do
+ exited = false
+
+ future = workflow_context.execute_activity(TestAsyncActivity)
+
+ fiber = Fiber.new do
+ workflow_context.wait_for_any(future) do
+ false
+ end
+
+ exited = true
+ end
+
+ fiber.resume # start running
+ expect(exited).to eq(false)
+
+ execution.complete_activity(async_token, 'async_ok')
+
+ fiber.resume # resume running after the Fiber.yield done in context.await
+ expect(exited).to eq(true)
+ end
+
+ it 'any future unblocks' do
+ exited = false
+
+ async_future = workflow_context.execute_activity(TestAsyncActivity)
+ future = workflow_context.execute_activity(TestActivity)
+ future.wait
+
+ fiber = Fiber.new do
+ workflow_context.wait_for_any(future, async_future)
+ exited = true
+ end
+
+ fiber.resume # start running
+ expect(exited).to eq(true)
+ end
+
+ describe '#upsert_search_attributes' do
+ it 'can be run' do
+ workflow_context.upsert_search_attributes({'CustomKeywordField' => 'moo'})
+ end
+
+ it 'does not accept nil' do
+ expect do
+ workflow_context.upsert_search_attributes(nil)
+ end.to raise_error(ArgumentError, 'search_attributes cannot be nil')
+ end
+
+ it 'requires a hash' do
+ expect do
+ workflow_context.upsert_search_attributes(['array_not_supported'])
+ end.to raise_error(ArgumentError, 'for search_attributes, expecting a Hash, not Array')
+ end
+
+ it 'requires a non-empty hash' do
+ expect do
+ workflow_context.upsert_search_attributes({})
+ end.to raise_error(ArgumentError, 'Cannot upsert an empty hash for search_attributes, as this would do nothing.')
+ end
+
+ it 'converts a Time to the ISO8601 UTC format expected by the Temporal server' do
+ time = Time.now
+ expect(
+ workflow_context.upsert_search_attributes({'CustomDatetimeField' => time})
+ ).to eq({ 'CustomDatetimeField' => time.utc.iso8601 })
+ end
+ end
end
end
diff --git a/spec/unit/lib/temporal/testing/replay_histories/do_nothing.json b/spec/unit/lib/temporal/testing/replay_histories/do_nothing.json
new file mode 100644
index 00000000..45a0ef20
--- /dev/null
+++ b/spec/unit/lib/temporal/testing/replay_histories/do_nothing.json
@@ -0,0 +1,103 @@
+{
+ "events":[
+ {
+ "eventId":"1",
+ "eventTime":"2024-05-27T18:53:53.483530640Z",
+ "eventType":"EVENT_TYPE_WORKFLOW_EXECUTION_STARTED",
+ "taskId":"27263213",
+ "workflowExecutionStartedEventAttributes":{
+ "workflowType":{
+ "name":"TestReplayWorkflow"
+ },
+ "taskQueue":{
+ "name":"general",
+ "kind":"TASK_QUEUE_KIND_NORMAL"
+ },
+ "input":{
+ "payloads":[
+ {
+ "metadata":{
+ "encoding":"anNvbi9wbGFpbg=="
+ },
+ "data":"eyI6cmVzdWx0Ijoic3VjY2VzcyJ9Cg=="
+ }
+ ]
+ },
+ "workflowExecutionTimeout":"30s",
+ "workflowRunTimeout":"30s",
+ "workflowTaskTimeout":"10s",
+ "originalExecutionRunId":"b3711f7b-2693-4c1b-ab67-24e73f80bdcf",
+ "identity":"123@test",
+ "firstExecutionRunId":"b3711f7b-2693-4c1b-ab67-24e73f80bdcf",
+ "attempt":1,
+ "workflowExecutionExpirationTime":"2024-05-27T18:54:23.483Z",
+ "firstWorkflowTaskBackoff":"0s",
+ "memo":{},
+ "searchAttributes":{},
+ "header":{}
+ }
+ },
+ {
+ "eventId":"2",
+ "eventTime":"2024-05-27T18:53:53.483621296Z",
+ "eventType":"EVENT_TYPE_WORKFLOW_TASK_SCHEDULED",
+ "taskId":"27263215",
+ "workflowTaskScheduledEventAttributes":{
+ "taskQueue":{
+ "name":"general",
+ "kind":"TASK_QUEUE_KIND_NORMAL"
+ },
+ "startToCloseTimeout":"10s",
+ "attempt":1
+ }
+ },
+ {
+ "eventId":"3",
+ "eventTime":"2024-05-27T18:53:53.504351823Z",
+ "eventType":"EVENT_TYPE_WORKFLOW_TASK_STARTED",
+ "taskId":"27263220",
+ "workflowTaskStartedEventAttributes":{
+ "scheduledEventId":"2",
+ "identity":"123@test",
+ "requestId":"195003c8-4c89-486b-8ae8-85cb209dc8b9",
+ "historySizeBytes":"395"
+ }
+ },
+ {
+ "eventId":"4",
+ "eventTime":"2024-05-27T18:53:53.620416193Z",
+ "eventType":"EVENT_TYPE_WORKFLOW_TASK_COMPLETED",
+ "taskId":"27263224",
+ "workflowTaskCompletedEventAttributes":{
+ "scheduledEventId":"2",
+ "startedEventId":"3",
+ "identity":"123@test",
+ "binaryChecksum":"d1feac6b4ac2fb57a304ddf1419efd6e06088e41",
+ "sdkMetadata":{
+ "langUsedFlags":[
+ 2
+ ]
+ }
+ }
+ },
+ {
+ "eventId":"5",
+ "eventTime":"2024-05-27T18:53:55.790974964Z",
+ "eventType":"EVENT_TYPE_WORKFLOW_EXECUTION_COMPLETED",
+ "taskId":"27263260",
+ "workflowExecutionCompletedEventAttributes":{
+ "result":{
+ "payloads":[
+ {
+ "metadata":{
+ "encoding":"anNvbi9wbGFpbg=="
+ },
+ "data":"ImRvbmUiCg=="
+ }
+ ]
+ },
+ "workflowTaskCompletedEventId":"4"
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/spec/unit/lib/temporal/testing/replay_tester_spec.rb b/spec/unit/lib/temporal/testing/replay_tester_spec.rb
new file mode 100644
index 00000000..861a5813
--- /dev/null
+++ b/spec/unit/lib/temporal/testing/replay_tester_spec.rb
@@ -0,0 +1,142 @@
+require "base64"
+require "json"
+require "temporal/testing/replay_tester"
+require "temporal/workflow"
+require "temporal/workflow/history"
+
+describe Temporal::Testing::ReplayTester do
+ class TestReplayActivity < Temporal::Activity
+ def execute
+ raise "should never run"
+ end
+ end
+
+ class TestReplayWorkflow < Temporal::Workflow
+ def execute(run_activity: false, run_sleep: false, result: "success")
+ TestReplayActivity.execute! if run_activity
+
+ workflow.sleep(1) if run_sleep
+
+ case result
+ when "success"
+ "done"
+ when "continue_as_new"
+ workflow.continue_as_new
+ nil
+ when "await"
+ # wait forever
+ workflow.wait_until { false }
+ when "fail"
+ raise "failed"
+ end
+ end
+ end
+
+ let(:replay_tester) { Temporal::Testing::ReplayTester.new }
+ let(:do_nothing_json) do
+ File.read(
+ "spec/unit/lib/temporal/testing/replay_histories/do_nothing.json"
+ )
+ end
+
+ let(:do_nothing) do
+ Temporal::Workflow::History::Serialization.from_json(do_nothing_json)
+ end
+
+ it "replay do nothing successful" do
+ replay_tester.replay_history(
+ TestReplayWorkflow,
+ do_nothing
+ )
+ end
+
+ def remove_first_history_event(history)
+ history.events.shift
+ history
+ end
+
+ it "replay missing start workflow execution event" do
+ replay_tester.replay_history(
+ TestReplayWorkflow,
+ remove_first_history_event(do_nothing)
+ )
+ raise "Expected error to raise"
+ rescue Temporal::Testing::ReplayError => e
+ expect(e.message).to(eq("History does not start with workflow_execution_started event"))
+ end
+
+ def set_workflow_args_in_history(json_args)
+ obj = JSON.load(do_nothing_json)
+ obj["events"][0]["workflowExecutionStartedEventAttributes"]["input"]["payloads"][0]["data"] = Base64.strict_encode64(
+ json_args
+ )
+ new_json = JSON.generate(obj)
+ Temporal::Workflow::History::Serialization.from_json(new_json)
+ end
+
+ it "replay extra activity" do
+ # The linked history will cause an error because it will cause an activity run even though
+ # there isn't one in the history.
+
+ replay_tester.replay_history(
+ TestReplayWorkflow,
+ set_workflow_args_in_history("{\":run_activity\":true}")
+ )
+ raise "Expected error to raise"
+ rescue Temporal::Testing::ReplayError => e
+ expect(e.message).to(eq("Workflow code failed to replay successfully against history"))
+ # Ensure backtrace was overwritten
+ expect(e.backtrace.first).to(start_with("Fiber backtraces:"))
+ expect(e.cause).to(be_a(Temporal::NonDeterministicWorkflowError))
+ expect(e.cause.message).to(
+ eq(
+ "Unexpected command. The replaying code is issuing: activity (5), but the history of previous executions " \
+ "recorded: complete_workflow (5). Likely, either you have made a version-unsafe change to your workflow or " \
+ "have non-deterministic behavior in your workflow. See https://docs.temporal.io/docs/java/versioning/#introduction-to-versioning."
+ )
+ )
+ end
+
+ it "replay continues as new when history completed" do
+ # The linked history will cause an error because it will cause the workflow to continue
+ # as new on replay when in the history, it completed successfully.
+
+ replay_tester.replay_history(
+ TestReplayWorkflow,
+ set_workflow_args_in_history("{\":result\":\"continue_as_new\"}")
+ )
+ raise "Expected error to raise"
+ rescue Temporal::Testing::ReplayError => e
+ expect(e.message).to(eq("Workflow code failed to replay successfully against history"))
+ expect(e.cause).to(be_a(Temporal::NonDeterministicWorkflowError))
+ expect(e.cause.message).to(
+ eq(
+ "Unexpected command. The replaying code is issuing: continue_as_new_workflow (5), but the history of " \
+ "previous executions recorded: complete_workflow (5). Likely, either you have made a version-unsafe " \
+ "change to your workflow or have non-deterministic behavior in your workflow. " \
+ "See https://docs.temporal.io/docs/java/versioning/#introduction-to-versioning."
+ )
+ )
+ end
+
+ it "replay keeps going when history succeeded" do
+ # The linked history will cause an error because it will cause the workflow to keep running
+ # when in the history, it completed successfully.
+
+ replay_tester.replay_history(
+ TestReplayWorkflow,
+ set_workflow_args_in_history("{\":result\":\"await\"}")
+ )
+ raise "Expected error to raise"
+ rescue Temporal::Testing::ReplayError => e
+ expect(e.message).to(eq("Workflow code failed to replay successfully against history"))
+ expect(e.cause).to(be_a(Temporal::NonDeterministicWorkflowError))
+ expect(e.cause.message).to(
+ eq(
+ "A command in the history of previous executions, complete_workflow (5), was not scheduled upon replay. " \
+ "Likely, either you have made a version-unsafe change to your workflow or have non-deterministic behavior " \
+ "in your workflow. See https://docs.temporal.io/docs/java/versioning/#introduction-to-versioning."
+ )
+ )
+ end
+end
diff --git a/spec/unit/lib/temporal/testing/temporal_override_spec.rb b/spec/unit/lib/temporal/testing/temporal_override_spec.rb
index 09e4eeee..4d8bef64 100644
--- a/spec/unit/lib/temporal/testing/temporal_override_spec.rb
+++ b/spec/unit/lib/temporal/testing/temporal_override_spec.rb
@@ -13,10 +13,19 @@ class TestTemporalOverrideWorkflow < Temporal::Workflow
def execute; end
end
+ class UpsertSearchAttributesWorkflow < Temporal::Workflow
+ namespace 'default-namespace'
+ task_queue 'default-task-queue'
+
+ def execute
+ workflow.upsert_search_attributes('CustomIntField' => 5)
+ end
+ end
+
context 'when testing mode is disabled' do
describe 'Temporal.start_workflow' do
let(:connection) { instance_double('Temporal::Connection::GRPC') }
- let(:response) { Temporal::Api::WorkflowService::V1::StartWorkflowExecutionResponse.new(run_id: 'xxx') }
+ let(:response) { Temporalio::Api::WorkflowService::V1::StartWorkflowExecutionResponse.new(run_id: 'xxx') }
before { allow(Temporal::Connection).to receive(:generate).and_return(connection) }
after { client.remove_instance_variable(:@connection) rescue NameError }
@@ -136,6 +145,14 @@ def execute
.with(an_instance_of(Temporal::Testing::LocalWorkflowContext))
end
+ it 'explicitly does not support starting a workflow with a signal' do
+ expect {
+ client.start_workflow(TestTemporalOverrideWorkflow, options: { signal_name: 'breakme' })
+ }.to raise_error(NotImplementedError) do |e|
+ expect(e.message).to eql("Signals are not available when Temporal::Testing.local! is on")
+ end
+ end
+
describe 'execution control' do
subject do
client.start_workflow(
@@ -166,7 +183,7 @@ def execute
end
context 'when workflow is started' do
- let(:status) { Temporal::Workflow::ExecutionInfo::RUNNING_STATUS }
+ let(:status) { Temporal::Workflow::Status::RUNNING }
it 'raises error' do
expect { subject }.to raise_error(error_class) { |e| expect(e.run_id).to eql(run_id) }
@@ -174,7 +191,7 @@ def execute
end
context 'when workflow has completed' do
- let(:status) { Temporal::Workflow::ExecutionInfo::COMPLETED_STATUS }
+ let(:status) { Temporal::Workflow::Status::COMPLETED }
it 'raises error' do
expect { subject }.to raise_error(error_class) { |e| expect(e.run_id).to eql(run_id) }
@@ -182,7 +199,7 @@ def execute
end
context 'when workflow has failed' do
- let(:status) { Temporal::Workflow::ExecutionInfo::FAILED_STATUS }
+ let(:status) { Temporal::Workflow::Status::FAILED }
it { is_expected.to be_a(String) }
end
@@ -198,7 +215,7 @@ def execute
end
context 'when workflow is started' do
- let(:status) { Temporal::Workflow::ExecutionInfo::RUNNING_STATUS }
+ let(:status) { Temporal::Workflow::Status::RUNNING }
it 'raises error' do
expect { subject }.to raise_error(error_class) { |e| expect(e.run_id).to eql(run_id) }
@@ -206,13 +223,13 @@ def execute
end
context 'when workflow has completed' do
- let(:status) { Temporal::Workflow::ExecutionInfo::COMPLETED_STATUS }
+ let(:status) { Temporal::Workflow::Status::COMPLETED }
it { is_expected.to be_a(String) }
end
context 'when workflow has failed' do
- let(:status) { Temporal::Workflow::ExecutionInfo::FAILED_STATUS }
+ let(:status) { Temporal::Workflow::Status::FAILED }
it { is_expected.to be_a(String) }
end
@@ -228,7 +245,7 @@ def execute
end
context 'when workflow is started' do
- let(:status) { Temporal::Workflow::ExecutionInfo::RUNNING_STATUS }
+ let(:status) { Temporal::Workflow::Status::RUNNING }
it 'raises error' do
expect { subject }.to raise_error(error_class) { |e| expect(e.run_id).to eql(run_id) }
@@ -236,7 +253,7 @@ def execute
end
context 'when workflow has completed' do
- let(:status) { Temporal::Workflow::ExecutionInfo::COMPLETED_STATUS }
+ let(:status) { Temporal::Workflow::Status::COMPLETED }
it 'raises error' do
expect { subject }.to raise_error(error_class) { |e| expect(e.run_id).to eql(run_id) }
@@ -244,7 +261,7 @@ def execute
end
context 'when workflow has failed' do
- let(:status) { Temporal::Workflow::ExecutionInfo::FAILED_STATUS }
+ let(:status) { Temporal::Workflow::Status::FAILED }
it 'raises error' do
expect { subject }.to raise_error(error_class) { |e| expect(e.run_id).to eql(run_id) }
@@ -252,6 +269,29 @@ def execute
end
end
end
+
+ describe 'Temporal.fetch_workflow_execution_info' do
+ it 'retrieves search attributes' do
+ workflow_id = 'upsert_search_attributes_test_wf-' + SecureRandom.uuid
+
+ run_id = client.start_workflow(
+ UpsertSearchAttributesWorkflow,
+ options: {
+ workflow_id: workflow_id,
+ search_attributes: {
+ 'AdditionalSearchAttribute' => 189,
+ },
+ },
+ )
+
+ info = client.fetch_workflow_execution_info('default-namespace', workflow_id, run_id)
+ expect(info.search_attributes).to eq({
+ 'CustomIntField' => 5,
+ 'AdditionalSearchAttribute' => 189,
+ })
+ end
+
+ end
end
end
end
diff --git a/spec/unit/lib/temporal/thread_pool_spec.rb b/spec/unit/lib/temporal/thread_pool_spec.rb
new file mode 100644
index 00000000..5de5b03a
--- /dev/null
+++ b/spec/unit/lib/temporal/thread_pool_spec.rb
@@ -0,0 +1,75 @@
+require 'temporal/thread_pool'
+
+describe Temporal::ThreadPool do
+ before do
+ allow(Temporal.metrics).to receive(:gauge)
+ end
+
+ let(:config) { Temporal::Configuration.new }
+ let(:size) { 2 }
+ let(:tags) { { foo: 'bar', bat: 'baz' } }
+ let(:thread_pool) { described_class.new(size, config, tags) }
+
+ describe '#new' do
+ it 'executes one task on a thread and exits' do
+ times = 0
+
+ thread_pool.schedule do
+ times += 1
+ end
+
+ thread_pool.shutdown
+
+ expect(times).to eq(1)
+ end
+
+ it 'handles error without exiting' do
+ times = 0
+
+ thread_pool.schedule do
+ times += 1
+ raise 'failure'
+ end
+
+ thread_pool.shutdown
+
+ expect(times).to eq(1)
+ end
+
+ it 'handles exception by exiting' do
+ Thread.report_on_exception = false
+ times = 0
+
+ thread_pool.schedule do
+ times += 1
+ raise Exception, 'crash'
+ end
+
+ begin
+ thread_pool.shutdown
+ rescue Exception => e
+ 'ok'
+ end
+
+ expect(times).to eq(1)
+ end
+
+ it 'reports thread available metrics' do
+ thread_pool.schedule do
+ end
+
+ thread_pool.shutdown
+
+ # Thread behavior is not deterministic. Ensure the calls match without
+ # verifying exact gauge values.
+ expect(Temporal.metrics)
+ .to have_received(:gauge)
+ .with(
+ Temporal::MetricKeys::THREAD_POOL_AVAILABLE_THREADS,
+ instance_of(Integer),
+ tags
+ )
+ .at_least(:once)
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/worker_spec.rb b/spec/unit/lib/temporal/worker_spec.rb
index d986958a..2c379567 100644
--- a/spec/unit/lib/temporal/worker_spec.rb
+++ b/spec/unit/lib/temporal/worker_spec.rb
@@ -6,12 +6,30 @@
describe Temporal::Worker do
subject { described_class.new(config) }
let(:config) { Temporal::Configuration.new }
+ let(:connection) { instance_double('Temporal::Connection::GRPC') }
+ let(:sdk_metadata_enabled) { true }
+ before do
+ allow(Temporal::Connection).to receive(:generate).and_return(connection)
+ allow(connection).to receive(:get_system_info).and_return(
+ Temporalio::Api::WorkflowService::V1::GetSystemInfoResponse.new(
+ server_version: 'test',
+ capabilities: Temporalio::Api::WorkflowService::V1::GetSystemInfoResponse::Capabilities.new(
+ sdk_metadata: sdk_metadata_enabled
+ )
+ )
+ )
+ end
class TestWorkerWorkflow < Temporal::Workflow
namespace 'default-namespace'
task_queue 'default-task-queue'
end
+ class OtherTestWorkerWorkflow < Temporal::Workflow
+ namespace 'default-namespace'
+ task_queue 'default-task-queue'
+ end
+
class TestWorkerActivity < Temporal::Activity
namespace 'default-namespace'
task_queue 'default-task-queue'
@@ -25,9 +43,14 @@ class TestWorkerActivityMiddleware
def call(_); end
end
- class TestWorkerActivity < Temporal::Activity
+ class TestWorkerWorkflowMiddleware
+ def call(_); end
+ end
+
+ class OtherTestWorkerActivity < Temporal::Activity
namespace 'default-namespace'
task_queue 'default-task-queue'
+
end
THREAD_SYNC_DELAY = 0.01
@@ -63,20 +86,52 @@ class TestWorkerActivity < Temporal::Activity
end
end
+ describe '#register_dynamic_workflow' do
+ let(:workflow_keys) { subject.send(:workflows).keys }
+
+ it 'registers a dynamic workflow with the provided config options' do
+ lookup = instance_double(Temporal::ExecutableLookup, add: nil)
+ expect(Temporal::ExecutableLookup).to receive(:new).and_return(lookup)
+ expect(lookup).to receive(:add_dynamic).with('test-dynamic-workflow', TestWorkerWorkflow)
+
+ subject.register_dynamic_workflow(
+ TestWorkerWorkflow,
+ name: 'test-dynamic-workflow',
+ namespace: 'test-namespace',
+ task_queue: 'test-task-queue'
+ )
+
+ expect(workflow_keys).to include(['test-namespace', 'test-task-queue'])
+ end
+
+ it 'cannot double-register a workflow' do
+ subject.register_dynamic_workflow(TestWorkerWorkflow)
+ expect do
+ subject.register_dynamic_workflow(OtherTestWorkerWorkflow)
+ end.to raise_error(
+ Temporal::SecondDynamicWorkflowError,
+ 'Temporal::Worker#register_dynamic_workflow: cannot register OtherTestWorkerWorkflow dynamically; ' \
+ 'TestWorkerWorkflow was already registered dynamically for task queue \'default-task-queue\', ' \
+ 'and there can be only one.'
+ )
+ end
+ end
+
describe '#register_activity' do
let(:lookup) { instance_double(Temporal::ExecutableLookup, add: nil) }
let(:activity_keys) { subject.send(:activities).keys }
- before { expect(Temporal::ExecutableLookup).to receive(:new).and_return(lookup) }
-
it 'registers an activity based on the default config options' do
+ expect(Temporal::ExecutableLookup).to receive(:new).and_return(lookup)
subject.register_activity(TestWorkerActivity)
expect(lookup).to have_received(:add).with('TestWorkerActivity', TestWorkerActivity)
- expect(activity_keys).to include(['default-namespace', 'default-task-queue'])
+ expect(activity_keys).to include(%w[default-namespace default-task-queue])
end
it 'registers an activity with provided config options' do
+ expect(Temporal::ExecutableLookup).to receive(:new).and_return(lookup)
+
subject.register_activity(
TestWorkerActivity,
name: 'test-activity',
@@ -85,10 +140,43 @@ class TestWorkerActivity < Temporal::Activity
)
expect(lookup).to have_received(:add).with('test-activity', TestWorkerActivity)
- expect(activity_keys).to include(['test-namespace', 'test-task-queue'])
+ expect(activity_keys).to include(%w[test-namespace test-task-queue])
end
end
+ describe '#register_dynamic_activity' do
+ let(:activity_keys) { subject.send(:activities).keys }
+
+ it 'registers a dynamic activity with the provided config options' do
+ lookup = instance_double(Temporal::ExecutableLookup, add: nil)
+ expect(Temporal::ExecutableLookup).to receive(:new).and_return(lookup)
+ expect(lookup).to receive(:add_dynamic).with('test-dynamic-activity', TestWorkerActivity)
+
+ subject.register_dynamic_activity(
+ TestWorkerActivity,
+ name: 'test-dynamic-activity',
+ namespace: 'test-namespace',
+ task_queue: 'test-task-queue'
+ )
+
+ expect(activity_keys).to include(%w[test-namespace test-task-queue])
+ end
+
+ it 'cannot double-register an activity' do
+ subject.register_dynamic_activity(TestWorkerActivity)
+ expect do
+ subject.register_dynamic_activity(OtherTestWorkerActivity)
+ end.to raise_error(
+ Temporal::SecondDynamicActivityError,
+ 'Temporal::Worker#register_dynamic_activity: cannot register OtherTestWorkerActivity dynamically; ' \
+ 'TestWorkerActivity was already registered dynamically for task queue \'default-task-queue\', ' \
+ 'and there can be only one.'
+ )
+ end
+
+
+ end
+
describe '#add_workflow_task_middleware' do
let(:middleware) { subject.send(:workflow_task_middleware) }
@@ -127,15 +215,45 @@ class TestWorkerActivity < Temporal::Activity
end
end
+ def start_and_stop(worker)
+ allow(worker).to receive(:on_started_hook) {
+ worker.stop
+ }
+ stopped = false
+ allow(worker).to receive(:on_stopped_hook) {
+ stopped = true
+ }
+
+ thread = Thread.new do
+ Thread.current.abort_on_exception = true
+ worker.start
+ end
+
+ while !stopped
+ sleep(THREAD_SYNC_DELAY)
+ end
+ thread
+ end
+
+ describe 'start and stop' do
+ it 'can stop before starting' do
+ expect(Temporal::Workflow::Poller)
+ .to_not receive(:new)
+ expect(Temporal::Activity::Poller)
+ .to_not receive(:new)
+ t = Thread.new {subject.stop}
+ subject.start
+ t.join
+ end
+ end
+
describe '#start' do
- let(:workflow_poller_1) { instance_double(Temporal::Workflow::Poller, start: nil) }
- let(:workflow_poller_2) { instance_double(Temporal::Workflow::Poller, start: nil) }
- let(:activity_poller_1) { instance_double(Temporal::Activity::Poller, start: nil) }
- let(:activity_poller_2) { instance_double(Temporal::Activity::Poller, start: nil) }
+ let(:workflow_poller_1) { instance_double(Temporal::Workflow::Poller, start: nil, stop_polling: nil, cancel_pending_requests: nil, wait: nil) }
+ let(:workflow_poller_2) { instance_double(Temporal::Workflow::Poller, start: nil, stop_polling: nil, cancel_pending_requests: nil, wait: nil) }
+ let(:activity_poller_1) { instance_double(Temporal::Activity::Poller, start: nil, stop_polling: nil, cancel_pending_requests: nil, wait: nil) }
+ let(:activity_poller_2) { instance_double(Temporal::Activity::Poller, start: nil, stop_polling: nil, cancel_pending_requests: nil, wait: nil) }
it 'starts a poller for each namespace/task list combination' do
- allow(subject).to receive(:shutting_down?).and_return(true)
-
allow(Temporal::Workflow::Poller)
.to receive(:new)
.with(
@@ -144,7 +262,10 @@ class TestWorkerActivity < Temporal::Activity
an_instance_of(Temporal::ExecutableLookup),
config,
[],
- thread_pool_size: 10
+ [],
+ thread_pool_size: 10,
+ binary_checksum: nil,
+ poll_retry_seconds: 0
)
.and_return(workflow_poller_1)
@@ -156,7 +277,10 @@ class TestWorkerActivity < Temporal::Activity
an_instance_of(Temporal::ExecutableLookup),
config,
[],
- thread_pool_size: 10
+ [],
+ thread_pool_size: 10,
+ binary_checksum: nil,
+ poll_retry_seconds: 0
)
.and_return(workflow_poller_2)
@@ -168,7 +292,9 @@ class TestWorkerActivity < Temporal::Activity
an_instance_of(Temporal::ExecutableLookup),
config,
[],
- thread_pool_size: 20
+ thread_pool_size: 20,
+ poll_retry_seconds: 0,
+ max_tasks_per_second: 0
)
.and_return(activity_poller_1)
@@ -180,7 +306,9 @@ class TestWorkerActivity < Temporal::Activity
an_instance_of(Temporal::ExecutableLookup),
config,
[],
- thread_pool_size: 20
+ thread_pool_size: 20,
+ poll_retry_seconds: 0,
+ max_tasks_per_second: 0
)
.and_return(activity_poller_2)
@@ -189,7 +317,7 @@ class TestWorkerActivity < Temporal::Activity
subject.register_activity(TestWorkerActivity)
subject.register_activity(TestWorkerActivity, task_queue: 'other-task-queue')
- subject.start
+ start_and_stop(subject)
expect(workflow_poller_1).to have_received(:start)
expect(workflow_poller_2).to have_received(:start)
@@ -198,7 +326,7 @@ class TestWorkerActivity < Temporal::Activity
end
it 'can have an activity poller with a different thread pool size' do
- activity_poller = instance_double(Temporal::Activity::Poller, start: nil)
+ activity_poller = instance_double(Temporal::Activity::Poller, start: nil, stop_polling: nil, cancel_pending_requests: nil, wait: nil)
expect(Temporal::Activity::Poller)
.to receive(:new)
.with(
@@ -207,24 +335,140 @@ class TestWorkerActivity < Temporal::Activity
an_instance_of(Temporal::ExecutableLookup),
an_instance_of(Temporal::Configuration),
[],
- {thread_pool_size: 10}
+ {thread_pool_size: 10, poll_retry_seconds: 0, max_tasks_per_second: 0}
)
.and_return(activity_poller)
- worker = Temporal::Worker.new(activity_thread_pool_size: 10)
- allow(worker).to receive(:shutting_down?).and_return(true)
+ workflow_poller = instance_double(Temporal::Workflow::Poller, start: nil, stop_polling: nil, cancel_pending_requests: nil, wait: nil)
+ expect(Temporal::Workflow::Poller)
+ .to receive(:new)
+ .and_return(workflow_poller)
+
+ worker = Temporal::Worker.new(config, activity_thread_pool_size: 10)
worker.register_workflow(TestWorkerWorkflow)
worker.register_activity(TestWorkerActivity)
- worker.start
+ start_and_stop(worker)
expect(activity_poller).to have_received(:start)
+ end
+ it 'is mutually exclusive with stop' do
+ subject.register_workflow(TestWorkerWorkflow)
+ subject.register_activity(TestWorkerActivity)
+
+ allow(subject).to receive(:while_stopping_hook) do
+ # This callback is within a mutex, so this new thread shouldn't
+ # do anything until Worker.stop is complete.
+ Thread.new { subject.start }
+ sleep(THREAD_SYNC_DELAY) # give it a little time to do damage if it's going to
+ end
+ subject.stop
+ end
+
+  it 'can have a workflow poller with a binary checksum' do
+ activity_poller = instance_double(Temporal::Activity::Poller, start: nil, stop_polling: nil, cancel_pending_requests: nil, wait: nil)
+ expect(Temporal::Activity::Poller)
+ .to receive(:new)
+ .and_return(activity_poller)
+
+ workflow_poller = instance_double(Temporal::Workflow::Poller, start: nil, stop_polling: nil, cancel_pending_requests: nil, wait: nil)
+ binary_checksum = 'abc123'
+ expect(Temporal::Workflow::Poller)
+ .to receive(:new)
+ .with(
+ 'default-namespace',
+ 'default-task-queue',
+ an_instance_of(Temporal::ExecutableLookup),
+ an_instance_of(Temporal::Configuration),
+ [],
+ [],
+ thread_pool_size: 10,
+ binary_checksum: binary_checksum,
+ poll_retry_seconds: 0
+ )
+ .and_return(workflow_poller)
+
+ worker = Temporal::Worker.new(config, binary_checksum: binary_checksum)
+ worker.register_workflow(TestWorkerWorkflow)
+ worker.register_activity(TestWorkerActivity)
+
+ start_and_stop(worker)
+
+ expect(workflow_poller).to have_received(:start)
+ end
+
+ it 'can have an activity poller that sleeps after unsuccessful poll' do
+ activity_poller = instance_double(Temporal::Activity::Poller, start: nil, stop_polling: nil, cancel_pending_requests: nil, wait: nil)
+ expect(Temporal::Activity::Poller)
+ .to receive(:new)
+ .with(
+ 'default-namespace',
+ 'default-task-queue',
+ an_instance_of(Temporal::ExecutableLookup),
+ an_instance_of(Temporal::Configuration),
+ [],
+ {thread_pool_size: 20, poll_retry_seconds: 10, max_tasks_per_second: 0}
+ )
+ .and_return(activity_poller)
+
+ worker = Temporal::Worker.new(config, activity_poll_retry_seconds: 10)
+ worker.register_activity(TestWorkerActivity)
+
+ start_and_stop(worker)
+
+ expect(activity_poller).to have_received(:start)
+ end
+
+ it 'can have a workflow poller sleeping after unsuccessful poll' do
+ workflow_poller = instance_double(Temporal::Workflow::Poller, start: nil, stop_polling: nil, cancel_pending_requests: nil, wait: nil)
+ expect(Temporal::Workflow::Poller)
+ .to receive(:new)
+ .with(
+ 'default-namespace',
+ 'default-task-queue',
+ an_instance_of(Temporal::ExecutableLookup),
+ an_instance_of(Temporal::Configuration),
+ [],
+ [],
+ {binary_checksum: nil, poll_retry_seconds: 10, thread_pool_size: 10}
+ )
+ .and_return(workflow_poller)
+
+ worker = Temporal::Worker.new(config, workflow_poll_retry_seconds: 10)
+ worker.register_workflow(TestWorkerWorkflow)
+
+ start_and_stop(worker)
+
+ expect(workflow_poller).to have_received(:start)
+ end
+
+ it 'can have an activity poller that registers a task rate limit' do
+ activity_poller = instance_double(Temporal::Activity::Poller, start: nil, stop_polling: nil, cancel_pending_requests: nil, wait: nil)
+ expect(Temporal::Activity::Poller)
+ .to receive(:new)
+ .with(
+ 'default-namespace',
+ 'default-task-queue',
+ an_instance_of(Temporal::ExecutableLookup),
+ an_instance_of(Temporal::Configuration),
+ [],
+ {thread_pool_size: 20, poll_retry_seconds: 0, max_tasks_per_second: 5}
+ )
+ .and_return(activity_poller)
+
+ worker = Temporal::Worker.new(config, activity_max_tasks_per_second: 5)
+ worker.register_activity(TestWorkerActivity)
+
+ start_and_stop(worker)
+
+ expect(activity_poller).to have_received(:start)
end
context 'when middleware is configured' do
let(:entry_1) { instance_double(Temporal::Middleware::Entry) }
let(:entry_2) { instance_double(Temporal::Middleware::Entry) }
+ let(:entry_3) { instance_double(Temporal::Middleware::Entry) }
before do
allow(Temporal::Middleware::Entry)
@@ -237,13 +481,17 @@ class TestWorkerActivity < Temporal::Activity
.with(TestWorkerActivityMiddleware, [])
.and_return(entry_2)
+ allow(Temporal::Middleware::Entry)
+ .to receive(:new)
+ .with(TestWorkerWorkflowMiddleware, [])
+ .and_return(entry_3)
+
subject.add_workflow_task_middleware(TestWorkerWorkflowTaskMiddleware)
subject.add_activity_middleware(TestWorkerActivityMiddleware)
+ subject.add_workflow_middleware(TestWorkerWorkflowMiddleware)
end
it 'starts pollers with correct middleware' do
- allow(subject).to receive(:shutting_down?).and_return(true)
-
allow(Temporal::Workflow::Poller)
.to receive(:new)
.with(
@@ -252,7 +500,10 @@ class TestWorkerActivity < Temporal::Activity
an_instance_of(Temporal::ExecutableLookup),
config,
[entry_1],
- thread_pool_size: 10
+ [entry_3],
+ thread_pool_size: 10,
+ binary_checksum: nil,
+ poll_retry_seconds: 0
)
.and_return(workflow_poller_1)
@@ -264,14 +515,16 @@ class TestWorkerActivity < Temporal::Activity
an_instance_of(Temporal::ExecutableLookup),
config,
[entry_2],
- thread_pool_size: 20
+ thread_pool_size: 20,
+ poll_retry_seconds: 0,
+ max_tasks_per_second: 0
)
.and_return(activity_poller_1)
subject.register_workflow(TestWorkerWorkflow)
subject.register_activity(TestWorkerActivity)
- subject.start
+ start_and_stop(subject)
expect(workflow_poller_1).to have_received(:start)
expect(activity_poller_1).to have_received(:start)
@@ -279,17 +532,19 @@ class TestWorkerActivity < Temporal::Activity
end
it 'sleeps while waiting for the shutdown' do
- allow(subject).to receive(:shutting_down?).and_return(false, false, false, true)
allow(subject).to receive(:sleep).and_return(nil)
- subject.start
+ start_and_stop(subject)
- expect(subject).to have_received(:sleep).with(1).exactly(3).times
+ expect(subject).to have_received(:sleep).with(1).once
end
describe 'signal handling' do
before do
- @thread = Thread.new { subject.start }
+ @thread = Thread.new do
+ @worker_pid = Process.pid
+ subject.start
+ end
sleep THREAD_SYNC_DELAY # give worker time to start
end
@@ -306,14 +561,14 @@ class TestWorkerActivity < Temporal::Activity
end
it 'traps TERM signal' do
- Process.kill('TERM', 0)
+ Process.kill('TERM', @worker_pid)
sleep THREAD_SYNC_DELAY
expect(@thread).not_to be_alive
end
it 'traps INT signal' do
- Process.kill('INT', 0)
+ Process.kill('INT', @worker_pid)
sleep THREAD_SYNC_DELAY
expect(@thread).not_to be_alive
@@ -347,17 +602,12 @@ class TestWorkerActivity < Temporal::Activity
subject.register_workflow(TestWorkerWorkflow)
subject.register_activity(TestWorkerActivity)
-
- @thread = Thread.new { subject.start }
- sleep THREAD_SYNC_DELAY # allow worker to start
end
it 'stops the pollers and cancels pending requests' do
- subject.stop
-
- sleep THREAD_SYNC_DELAY # wait for the worker to stop
+ thread = start_and_stop(subject)
- expect(@thread).not_to be_alive
+ expect(thread).not_to be_alive
expect(workflow_poller).to have_received(:stop_polling)
expect(workflow_poller).to have_received(:cancel_pending_requests)
expect(activity_poller).to have_received(:stop_polling)
@@ -365,11 +615,9 @@ class TestWorkerActivity < Temporal::Activity
end
it 'waits for the pollers to stop' do
- subject.stop
-
- sleep THREAD_SYNC_DELAY # wait for worker to stop
+ thread = start_and_stop(subject)
- expect(@thread).not_to be_alive
+ expect(thread).not_to be_alive
expect(workflow_poller).to have_received(:wait)
expect(activity_poller).to have_received(:wait)
end
diff --git a/spec/unit/lib/temporal/workflow/context_spec.rb b/spec/unit/lib/temporal/workflow/context_spec.rb
new file mode 100644
index 00000000..05a61282
--- /dev/null
+++ b/spec/unit/lib/temporal/workflow/context_spec.rb
@@ -0,0 +1,507 @@
+require 'temporal/activity'
+require 'temporal/workflow'
+require 'temporal/workflow/context'
+require 'temporal/workflow/dispatcher'
+require 'temporal/workflow/future'
+require 'temporal/workflow/query_registry'
+require 'temporal/workflow/stack_trace_tracker'
+require 'temporal/metadata/workflow'
+require 'time'
+
+class MyTestWorkflow < Temporal::Workflow; end
+class MyTestActivity < Temporal::Activity
+ RETURN_VALUE = 'this-is-a-return-value'.freeze
+
+ def execute
+ RETURN_VALUE
+ end
+end
+
+describe Temporal::Workflow::Context do
+ let(:state_manager) { instance_double('Temporal::Workflow::StateManager') }
+ let(:dispatcher) { Temporal::Workflow::Dispatcher.new }
+ let(:query_registry) do
+ double = instance_double('Temporal::Workflow::QueryRegistry')
+ allow(double).to receive(:register)
+ double
+ end
+ let(:metadata_hash) { Fabricate(:workflow_metadata).to_h }
+ let(:metadata) { Temporal::Metadata::Workflow.new(**metadata_hash) }
+ let(:config) { Temporal::Configuration.new }
+
+ let(:workflow_context) do
+ Temporal::Workflow::Context.new(
+ state_manager,
+ dispatcher,
+ MyTestWorkflow,
+ metadata,
+ config,
+ query_registry,
+ track_stack_trace
+ )
+ end
+ let(:child_workflow_execution) { Fabricate(:api_workflow_execution) }
+ let(:track_stack_trace) { false }
+
+ describe '#on_query' do
+ let(:handler) { Proc.new {} }
+
+ it 'registers a query with the query registry' do
+ workflow_context.on_query('test-query', &handler)
+
+ expect(query_registry).to have_received(:register).with('test-query') do |&block|
+ expect(block).to eq(handler)
+ end
+ end
+
+ it 'automatically registers stack trace query' do
+ expect(workflow_context).to_not be(nil) # ensure constructor is called
+ expect(query_registry).to have_received(:register)
+ .with(Temporal::Workflow::StackTraceTracker::STACK_TRACE_QUERY_NAME)
+ end
+
+ context 'stack trace' do
+ let(:track_stack_trace) { true }
+ let(:query_registry) { Temporal::Workflow::QueryRegistry.new }
+
+ it 'cleared to start' do
+ expect(workflow_context).to_not be(nil) # ensure constructor is called
+ stack_trace = query_registry.handle(Temporal::Workflow::StackTraceTracker::STACK_TRACE_QUERY_NAME)
+ expect(stack_trace).to eq("Fiber count: 0\n")
+ end
+ end
+ end
+
+ describe '#execute_activity' do
+ context "with header propagation" do
+ class TestHeaderPropagator
+ def inject!(header)
+ header['test'] = 'asdf'
+ end
+ end
+
+ it 'propagates the header' do
+ config.add_header_propagator(TestHeaderPropagator)
+ expect(state_manager).to receive(:schedule).with(Temporal::Workflow::Command::ScheduleActivity.new(
+ activity_id: nil,
+ activity_type: 'MyTestActivity',
+ input: [],
+ task_queue: 'default-task-queue',
+ retry_policy: nil,
+ timeouts: {execution: 315360000, run: 315360000, task: 10, schedule_to_close: nil, schedule_to_start: nil, start_to_close: 30, heartbeat: nil, default_heartbeat_throttle_interval: 30, max_heartbeat_throttle_interval: 60},
+ headers: { 'test' => 'asdf' }
+ ))
+ allow(dispatcher).to receive(:register_handler)
+ workflow_context.execute_activity(MyTestActivity)
+ end
+ end
+ end
+
+ describe '#execute_local_activity' do
+ it 'executes and schedules command' do
+ expect(state_manager).to receive(:next_side_effect)
+ expect(state_manager).to receive(:schedule).with(
+ Temporal::Workflow::Command::RecordMarker.new(
+ name: 'SIDE_EFFECT',
+ details: MyTestActivity::RETURN_VALUE
+ )
+ )
+ return_value = workflow_context.execute_local_activity(MyTestActivity)
+ expect(return_value).to eq(MyTestActivity::RETURN_VALUE)
+ end
+ end
+
+ describe '#execute_workflow' do
+ it 'returns the correct futures when starting a child workflow' do
+ allow(state_manager).to receive(:schedule)
+ allow(dispatcher).to receive(:register_handler)
+
+ result = workflow_context.execute_workflow(MyTestWorkflow)
+ expect(result).to be_instance_of(Temporal::Workflow::ChildWorkflowFuture)
+ expect(result.child_workflow_execution_future).to be_instance_of(Temporal::Workflow::Future)
+ end
+
+ it 'futures behave as expected when events are successful' do
+ started_proc = nil
+ completed_proc = nil
+
+ allow(state_manager).to receive(:schedule)
+ allow(dispatcher).to receive(:register_handler) do |target, event_name, &handler|
+ case event_name
+ when 'started'
+ started_proc = handler
+ when 'completed'
+ completed_proc = handler
+ end
+ end
+
+ child_workflow_future = workflow_context.execute_workflow(MyTestWorkflow)
+
+ # expect all futures to be false as nothing has happened
+ expect(child_workflow_future.finished?).to be false
+ expect(child_workflow_future.child_workflow_execution_future.finished?).to be false
+
+ # dispatch the start event and check if the child workflow execution changes to true
+ started_proc.call(child_workflow_execution)
+ expect(child_workflow_future.finished?).to be false
+ expect(child_workflow_future.child_workflow_execution_future.finished?).to be true
+ expect(child_workflow_future.child_workflow_execution_future.get).to be_instance_of(Temporalio::Api::Common::V1::WorkflowExecution)
+
+ # complete the workflow via dispatch and check if the child workflow future is finished
+ completed_proc.call('finished result')
+ expect(child_workflow_future.finished?).to be true
+ expect(child_workflow_future.child_workflow_execution_future.finished?).to be true
+ end
+
+ it 'futures behave as expected when child workflow fails' do
+ started_proc = nil
+ failed_proc = nil
+
+ allow(state_manager).to receive(:schedule)
+ allow(dispatcher).to receive(:register_handler) do |target, event_name, &handler|
+ case event_name
+ when 'started'
+ started_proc = handler
+ when 'failed'
+ failed_proc = handler
+ end
+ end
+
+ child_workflow_future = workflow_context.execute_workflow(MyTestWorkflow)
+
+ # expect all futures to be false as nothing has happened
+ expect(child_workflow_future.finished?).to be false
+ expect(child_workflow_future.child_workflow_execution_future.finished?).to be false
+
+ started_proc.call(child_workflow_execution)
+
+ # dispatch the failed event and check the child_workflow_future failed but the child_workflow_execution_future finished
+ failed_proc.call(Temporal::Workflow::Errors.generate_error_for_child_workflow_start("failed to start", "random-workflow-id"))
+ expect(child_workflow_future.failed?).to be true
+ expect(child_workflow_future.child_workflow_execution_future.failed?).to be false
+ end
+
+ it 'futures behave as expected when child execution workflow fails to start' do
+ failed_proc = nil
+
+ allow(state_manager).to receive(:schedule)
+ allow(dispatcher).to receive(:register_handler) do |target, event_name, &handler|
+ case event_name
+ when 'failed'
+ failed_proc = handler
+ end
+ end
+
+ child_workflow_future = workflow_context.execute_workflow(MyTestWorkflow)
+
+ # expect all futures to be false as nothing has happened
+ expect(child_workflow_future.finished?).to be false
+ expect(child_workflow_future.child_workflow_execution_future.finished?).to be false
+
+ # dispatch the failed event and check what happens
+ failed_proc.call(Temporal::Workflow::Errors.generate_error_for_child_workflow_start("failed to start", "random-workflow-id"))
+ expect(child_workflow_future.failed?).to be true
+ expect(child_workflow_future.child_workflow_execution_future.failed?).to be true
+ end
+ end
+
+ describe '#execute_workflow!' do
+ let(:child_workflow_future) do
+ double = instance_double('Temporal::Workflow::ChildWorkflowFuture')
+ allow(double).to receive(:get).and_return(result)
+ double
+ end
+
+ before do
+ expect(workflow_context).to receive(:execute_workflow).and_return(child_workflow_future)
+ end
+
+ context 'when future fails' do
+ let(:result) { Temporal::WorkflowRunError }
+
+ it 'raises the future result exception' do
+ expect(child_workflow_future).to receive(:failed?).and_return(true)
+ expect { workflow_context.execute_workflow!(MyTestWorkflow) }.to raise_error(result)
+ end
+ end
+
+ context 'when future succeeds' do
+ let(:result) { 'result' }
+
+ it 'returns the future result' do
+ expect(child_workflow_future).to receive(:failed?).and_return(false)
+ expect(workflow_context.execute_workflow!(MyTestWorkflow)).to eq(result)
+ end
+ end
+ end
+
+ describe '#schedule_workflow' do
+ let(:cron_schedule) { '* * * * *' }
+
+ context 'when given workflow options' do
+ it 'executes workflow with merged cron_schedule option' do
+ expect(workflow_context).to receive(:execute_workflow).with(MyTestWorkflow,
+ options: {
+ parent_close_policy: :abandon,
+ cron_schedule: cron_schedule
+ }
+ )
+ workflow_context.schedule_workflow(MyTestWorkflow, cron_schedule, options: { parent_close_policy: :abandon })
+ end
+ end
+
+ context 'when not given workflow options' do
+ it 'executes workflow with cron_schedule option' do
+ expect(workflow_context).to receive(:execute_workflow).with(MyTestWorkflow,
+ options: {
+ cron_schedule: cron_schedule
+ }
+ )
+ workflow_context.schedule_workflow(MyTestWorkflow, cron_schedule)
+ end
+ end
+ end
+
+ describe '#upsert_search_attributes' do
+ it 'does not accept nil' do
+ expect do
+ workflow_context.upsert_search_attributes(nil)
+ end.to raise_error(ArgumentError, 'search_attributes cannot be nil')
+ end
+
+ it 'requires a hash' do
+ expect do
+ workflow_context.upsert_search_attributes(['array_not_supported'])
+ end.to raise_error(ArgumentError, 'for search_attributes, expecting a Hash, not Array')
+ end
+
+ it 'requires a non-empty hash' do
+ expect do
+ workflow_context.upsert_search_attributes({})
+ end.to raise_error(ArgumentError, 'Cannot upsert an empty hash for search_attributes, as this would do nothing.')
+ end
+
+ it 'creates a command to execute the request' do
+ expect(state_manager).to receive(:schedule)
+ .with an_instance_of(Temporal::Workflow::Command::UpsertSearchAttributes)
+ workflow_context.upsert_search_attributes({ 'CustomIntField' => 5 })
+ end
+
+ it 'converts a Time to the ISO8601 UTC format expected by the Temporal server' do
+ time = Time.now
+ allow(state_manager).to receive(:schedule)
+ .with an_instance_of(Temporal::Workflow::Command::UpsertSearchAttributes)
+
+ expect(
+ workflow_context.upsert_search_attributes({'CustomDatetimeField' => time})
+ ).to eq({ 'CustomDatetimeField' => time.utc.iso8601 })
+ end
+
+ it 'gets latest search attributes from state_manager' do
+ search_attributes = { 'CustomIntField' => 42 }
+ expect(state_manager).to receive(:search_attributes).and_return(search_attributes)
+ expect(workflow_context.search_attributes).to eq(search_attributes)
+ end
+ end
+
+ describe '#name' do
+ it 'returns the name from the metadata' do
+ # Set in the :workflow_metadata Fabricator
+ expect(workflow_context.name).to eq("TestWorkflow")
+ end
+ end
+
+ describe '#wait_for_all' do
+ let(:target_1) { 'target1' }
+ let(:future_1) { Temporal::Workflow::Future.new(target_1, workflow_context) }
+ let(:target_2) { 'target2' }
+ let(:future_2) { Temporal::Workflow::Future.new(target_2, workflow_context) }
+
+ def wait_for_all
+ unblocked = false
+
+ Fiber.new do
+ workflow_context.wait_for_all(future_1, future_2)
+ unblocked = true
+ end.resume
+
+ proc { unblocked }
+ end
+
+ it 'no futures returns immediately' do
+ workflow_context.wait_for_all
+ end
+
+ it 'futures already finished' do
+ future_1.set('done')
+ future_2.set('also done')
+ check_unblocked = wait_for_all
+
+ expect(check_unblocked.call).to be(true)
+ end
+
+ it 'futures finished' do
+ check_unblocked = wait_for_all
+
+ future_1.set('done')
+ dispatcher.dispatch(target_1, 'foo')
+ expect(check_unblocked.call).to be(false)
+
+ future_2.set('also done')
+ dispatcher.dispatch(target_2, 'foo')
+ expect(check_unblocked.call).to be(true)
+ end
+ end
+
+ describe '#wait_for_any' do
+ let(:target_1) { 'target1' }
+ let(:future_1) { Temporal::Workflow::Future.new(target_1, workflow_context) }
+ let(:target_2) { 'target2' }
+ let(:future_2) { Temporal::Workflow::Future.new(target_2, workflow_context) }
+
+ def wait_for_any
+ unblocked = false
+
+ Fiber.new do
+ workflow_context.wait_for_any(future_1, future_2)
+ unblocked = true
+ end.resume
+
+ proc { unblocked }
+ end
+
+ it 'no futures returns immediately' do
+ workflow_context.wait_for_any
+ end
+
+ it 'one future already finished' do
+ future_1.set("it's done")
+ check_unblocked = wait_for_any
+
+ expect(check_unblocked.call).to be(true)
+ end
+
+ it 'one future becomes finished' do
+ check_unblocked = wait_for_any
+ future_1.set("it's done")
+ dispatcher.dispatch(target_1, 'foo')
+
+ expect(check_unblocked.call).to be(true)
+
+ # Dispatch a second time. This should not attempt to
+ # resume the fiber which by now should already be dead.
+ dispatcher.dispatch(target_1, 'foo')
+ end
+
+    it 'both futures become finished' do
+ check_unblocked = wait_for_any
+ future_1.set("it's done")
+ future_2.set("it's done")
+ dispatcher.dispatch(target_1, 'foo')
+ dispatcher.dispatch(target_2, 'foo')
+
+ expect(check_unblocked.call).to be(true)
+ end
+
+ it 'one future dispatched but not finished' do
+ check_unblocked = wait_for_any
+ dispatcher.dispatch(target_1, 'foo')
+
+ expect(check_unblocked.call).to be(false)
+ end
+
+ context 'stack trace' do
+ let(:track_stack_trace) { true }
+ let(:query_registry) { Temporal::Workflow::QueryRegistry.new }
+
+ it 'is recorded' do
+ wait_for_any
+ stack_trace = query_registry.handle(Temporal::Workflow::StackTraceTracker::STACK_TRACE_QUERY_NAME)
+
+ expect(stack_trace).to start_with('Fiber count: 1')
+ expect(stack_trace).to include('block in wait_for_any')
+ end
+
+ it 'cleared after unblocked' do
+ wait_for_any
+
+ future_1.set("it's done")
+ future_2.set("it's done")
+ dispatcher.dispatch(target_1, 'foo')
+ dispatcher.dispatch(target_2, 'foo')
+
+ stack_trace = query_registry.handle(Temporal::Workflow::StackTraceTracker::STACK_TRACE_QUERY_NAME)
+
+ expect(stack_trace).to eq("Fiber count: 0\n")
+ end
+ end
+ end
+
+ describe '#wait_until' do
+ def wait_until(&blk)
+ unblocked = false
+
+ Fiber.new do
+ workflow_context.wait_until(&blk)
+ unblocked = true
+ end.resume
+
+ proc { unblocked }
+ end
+
+ it 'block already true' do
+ check_unblocked = wait_until { true }
+
+ expect(check_unblocked.call).to be(true)
+ end
+
+ it 'block is always false' do
+ check_unblocked = wait_until { false }
+
+ dispatcher.dispatch('target', 'foo')
+ expect(check_unblocked.call).to be(false)
+ end
+
+ it 'block becomes true' do
+ value = false
+ check_unblocked = wait_until { value }
+
+ expect(check_unblocked.call).to be(false)
+
+ dispatcher.dispatch('target', 'foo')
+ expect(check_unblocked.call).to be(false)
+
+ value = true
+ dispatcher.dispatch('target', 'foo')
+ expect(check_unblocked.call).to be(true)
+
+ # Can dispatch again safely without resuming dead fiber
+ dispatcher.dispatch('target', 'foo')
+ end
+
+ context 'stack trace' do
+ let(:track_stack_trace) { true }
+ let(:query_registry) { Temporal::Workflow::QueryRegistry.new }
+
+ it 'is recorded' do
+ wait_until { false }
+ stack_trace = query_registry.handle(Temporal::Workflow::StackTraceTracker::STACK_TRACE_QUERY_NAME)
+
+ expect(stack_trace).to start_with('Fiber count: 1')
+ expect(stack_trace).to include('block in wait_until')
+ end
+
+ it 'cleared after unblocked' do
+ value = false
+ wait_until { value }
+
+ value = true
+ dispatcher.dispatch('target', 'foo')
+
+ stack_trace = query_registry.handle(Temporal::Workflow::StackTraceTracker::STACK_TRACE_QUERY_NAME)
+
+ expect(stack_trace).to eq("Fiber count: 0\n")
+ end
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/workflow/convenience_methods_spec.rb b/spec/unit/lib/temporal/workflow/convenience_methods_spec.rb
index 46183fac..f865f16e 100644
--- a/spec/unit/lib/temporal/workflow/convenience_methods_spec.rb
+++ b/spec/unit/lib/temporal/workflow/convenience_methods_spec.rb
@@ -19,7 +19,7 @@ class TestWorkflow < Temporal::Workflow; end
allow(context).to receive(:execute_workflow)
end
- it 'executes activity' do
+ it 'executes workflow' do
subject.execute(input, **options)
expect(context)
@@ -46,7 +46,7 @@ class TestWorkflow < Temporal::Workflow; end
allow(context).to receive(:execute_workflow!)
end
- it 'executes activity' do
+ it 'executes workflow' do
subject.execute!(input, **options)
expect(context)
@@ -65,4 +65,33 @@ class TestWorkflow < Temporal::Workflow; end
end
end
end
+
+ describe '.schedule' do
+ let(:cron_schedule) { '* * * * *' }
+
+ context 'with local context' do
+ before do
+ Temporal::ThreadLocalContext.set(context)
+ allow(context).to receive(:schedule_workflow)
+ end
+
+ it 'schedules workflow' do
+ subject.schedule(cron_schedule, input, **options)
+
+ expect(context)
+ .to have_received(:schedule_workflow)
+ .with(subject, cron_schedule, input, options)
+ end
+ end
+
+ context 'without local context' do
+ before { Temporal::ThreadLocalContext.set(nil) }
+
+ it 'raises an error' do
+ expect do
+ subject.schedule(cron_schedule, input, **options)
+ end.to raise_error('Called Workflow#schedule outside of a Workflow context')
+ end
+ end
+ end
end
diff --git a/spec/unit/lib/temporal/workflow/dispatcher_spec.rb b/spec/unit/lib/temporal/workflow/dispatcher_spec.rb
index d5e008f8..bf1887b9 100644
--- a/spec/unit/lib/temporal/workflow/dispatcher_spec.rb
+++ b/spec/unit/lib/temporal/workflow/dispatcher_spec.rb
@@ -1,13 +1,60 @@
require 'temporal/workflow/dispatcher'
+require 'temporal/workflow/history/event_target'
describe Temporal::Workflow::Dispatcher do
+ let(:target) { Temporal::Workflow::History::EventTarget.new(1, Temporal::Workflow::History::EventTarget::ACTIVITY_TYPE) }
+ let(:other_target) { Temporal::Workflow::History::EventTarget.new(2, Temporal::Workflow::History::EventTarget::TIMER_TYPE) }
+
describe '#register_handler' do
- it 'stores a given handler against the target' do
- block = -> { 'handler body' }
+ let(:block) { -> { 'handler body' } }
+ let(:event_name) { 'signaled' }
+ let(:dispatcher) do
+ subject.register_handler(target, event_name, &block)
+ subject
+ end
+ let(:handlers) { dispatcher.send(:event_handlers) }
+
+ it 'stores the target' do
+ expect(handlers.key?(target)).to be true
+ end
+
+ it 'stores the target and handler once' do
+ expect(handlers[target]).to be_kind_of(Hash)
+ expect(handlers[target].count).to eq 1
+ end
+
+ it 'associates the event name with the target' do
+ event = handlers[target][1]
+ expect(event.event_name).to eq(event_name)
+ end
+
+ it 'associates the handler with the target' do
+ event = handlers[target][1]
+ expect(event.handler).to eq(block)
+ end
+
+ it 'removes a given handler against the target' do
+ block1 = -> { 'handler body' }
+ block2 = -> { 'other handler body' }
+ block3 = -> { 'yet another handler body' }
+
+ handle1 = subject.register_handler(target, 'signaled', &block1)
+ subject.register_handler(target, 'signaled', &block2)
+ subject.register_handler(other_target, 'signaled', &block3)
+
+ expect(handlers[target][1].event_name).to eq('signaled')
+ expect(handlers[target][1].handler).to be(block1)
+
+ expect(handlers[target][2].event_name).to eq('signaled')
+ expect(handlers[target][2].handler).to be(block2)
- subject.register_handler('target', 'signaled', &block)
+ expect(handlers[other_target][3].event_name).to eq('signaled')
+ expect(handlers[other_target][3].handler).to be(block3)
- expect(subject.send(:handlers)).to include('target' => [['signaled', block]])
+ handle1.unregister
+ expect(handlers[target][1]).to be(nil)
+ expect(handlers[target][2]).to_not be(nil)
+ expect(handlers[other_target][3]).to_not be(nil)
end
end
@@ -22,14 +69,14 @@
allow(handler).to receive(:call)
end
- subject.register_handler('target', 'completed', &handler_1)
- subject.register_handler('other_target', 'completed', &handler_2)
- subject.register_handler('target', 'failed', &handler_3)
- subject.register_handler('target', 'completed', &handler_4)
+ subject.register_handler(target, 'completed', &handler_1)
+ subject.register_handler(other_target, 'completed', &handler_2)
+ subject.register_handler(target, 'failed', &handler_3)
+ subject.register_handler(target, 'completed', &handler_4)
end
it 'calls all matching handlers in the original order' do
- subject.dispatch('target', 'completed')
+ subject.dispatch(target, 'completed')
expect(handler_1).to have_received(:call).ordered
expect(handler_4).to have_received(:call).ordered
@@ -39,7 +86,7 @@
end
it 'passes given arguments to the handlers' do
- subject.dispatch('target', 'failed', ['TIME_OUT', 'Exceeded execution time'])
+ subject.dispatch(target, 'failed', ['TIME_OUT', 'Exceeded execution time'])
expect(handler_3).to have_received(:call).with('TIME_OUT', 'Exceeded execution time')
@@ -54,15 +101,68 @@
before do
allow(handler_5).to receive(:call)
- subject.register_handler('target', described_class::WILDCARD, &handler_5)
+ subject.register_handler(target, described_class::WILDCARD, &handler_5)
end
it 'calls the handler' do
- subject.dispatch('target', 'completed')
+ subject.dispatch(target, 'completed')
expect(handler_5).to have_received(:call)
end
+ end
+
+ context 'with WILDCARD target handler' do
+ let(:handler_6) { -> { 'sixth block' } }
+ let(:handler_7) { -> { 'seventh block' } }
+ before do
+ allow(handler_6).to receive(:call)
+ allow(handler_7).to receive(:call)
+
+ subject.register_handler(described_class::WILDCARD, described_class::WILDCARD, &handler_6)
+ subject.register_handler(target, 'completed', &handler_7)
+ end
+
+ it 'calls the handler' do
+ subject.dispatch(target, 'completed')
+
+ # Target handlers still invoked
+ expect(handler_1).to have_received(:call).ordered
+ expect(handler_4).to have_received(:call).ordered
+ expect(handler_6).to have_received(:call).ordered
+ expect(handler_7).to have_received(:call).ordered
+ end
+
+ it 'WILDCARD can be compared to an EventTarget object' do
+ expect(target.eql?(described_class::WILDCARD)).to be(false)
+ end
+ end
+
+ context 'with AT_END order' do
+ let(:handler_5) { -> { 'fifth block' } }
+ let(:handler_6) { -> { 'sixth block' } }
+ let(:handler_7) { -> { 'seventh block' } }
+ before do
+ allow(handler_5).to receive(:call)
+ allow(handler_6).to receive(:call)
+ allow(handler_7).to receive(:call)
+
+ subject.register_handler(described_class::WILDCARD, described_class::WILDCARD, described_class::Order::AT_END, &handler_5)
+ subject.register_handler(described_class::WILDCARD, described_class::WILDCARD, described_class::Order::AT_END, &handler_6)
+ subject.register_handler(target, 'completed', &handler_7)
+ end
+ it 'calls the handler' do
+ subject.dispatch(target, 'completed')
+
+ # Target handlers still invoked
+ expect(handler_1).to have_received(:call).ordered
+ expect(handler_4).to have_received(:call).ordered
+ expect(handler_7).to have_received(:call).ordered
+
+ # AT_END handlers are invoked at the end, in order
+ expect(handler_5).to have_received(:call).ordered
+ expect(handler_6).to have_received(:call).ordered
+ end
end
end
end
diff --git a/spec/unit/lib/temporal/workflow/errors_spec.rb b/spec/unit/lib/temporal/workflow/errors_spec.rb
index a9e053f6..bce9d477 100644
--- a/spec/unit/lib/temporal/workflow/errors_spec.rb
+++ b/spec/unit/lib/temporal/workflow/errors_spec.rb
@@ -4,9 +4,36 @@ class ErrorWithTwoArgs < StandardError
def initialize(message, another_argument); end
end
+class ErrorThatRaisesInInitialize < StandardError
+ def initialize(message)
+ # This class simulates an error class that has bugs in its initialize method, or where
+ # the arg isn't a string. It raises the sort of TypeError that would happen if you wrote
+ # 1 + message
+ raise TypeError.new("String can't be coerced into Integer")
+ end
+end
+
class SomeError < StandardError; end
+class MyFancyError < Exception
+
+ attr_reader :foo, :bar
+
+ # Initializer doesn't just take one argument as StandardError does.
+ def initialize(foo, bar)
+ @foo = foo
+ @bar = bar
+ end
+end
+
describe Temporal::Workflow::Errors do
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+
describe '.generate_error' do
it "instantiates properly when the client has the error" do
message = "An error message"
@@ -18,14 +45,25 @@ class SomeError < StandardError; end
error_class: SomeError.to_s
)
- e = Temporal::Workflow::Errors.generate_error(failure)
+ e = Temporal::Workflow::Errors.generate_error(failure, converter)
expect(e).to be_a(SomeError)
expect(e.message).to eq(message)
expect(e.backtrace).to eq(stack_trace)
end
- it "falls back to StandardError when the client doesn't have the error class" do
+ it 'correctly deserializes a complex error' do
+ error = MyFancyError.new('foo', 'bar')
+ failure = Temporal::Connection::Serializer::Failure.new(error, converter, serialize_whole_error: true).to_proto
+
+ e = Temporal::Workflow::Errors.generate_error(failure, converter)
+ expect(e).to be_a(MyFancyError)
+ expect(e.foo).to eq('foo')
+ expect(e.bar).to eq('bar')
+ end
+
+
+ it "falls back to StandardError when the client doesn't have the error class" do
allow(Temporal.logger).to receive(:error)
message = "An error message"
@@ -37,7 +75,7 @@ class SomeError < StandardError; end
error_class: 'NonexistentError',
)
- e = Temporal::Workflow::Errors.generate_error(failure)
+ e = Temporal::Workflow::Errors.generate_error(failure, converter)
expect(e).to be_a(StandardError)
expect(e.message).to eq("NonexistentError: An error message")
expect(e.backtrace).to eq(stack_trace)
@@ -51,7 +89,7 @@ class SomeError < StandardError; end
end
- it "falls back to StandardError when the client can't initialize the error class" do
+ it "falls back to StandardError when the client can't initialize the error class due to arity" do
allow(Temporal.logger).to receive(:error)
message = "An error message"
@@ -63,20 +101,58 @@ class SomeError < StandardError; end
error_class: ErrorWithTwoArgs.to_s,
)
- e = Temporal::Workflow::Errors.generate_error(failure)
+ e = Temporal::Workflow::Errors.generate_error(failure, converter)
expect(e).to be_a(StandardError)
expect(e.message).to eq("ErrorWithTwoArgs: An error message")
expect(e.backtrace).to eq(stack_trace)
expect(Temporal.logger)
.to have_received(:error)
.with(
- 'Could not instantiate original error. Defaulting to StandardError.',
+ "Could not instantiate original error. Defaulting to StandardError. "\
+ "Make sure the worker running your activities is configured with use_error_serialization_v2. "\
+ "If so, make sure the original error serialized by searching your logs for 'unserializable_error'. "\
+ "If not, you're using legacy serialization, and it's likely that "\
+ "your error's initializer takes something other than exactly one positional argument.",
{
original_error: "ErrorWithTwoArgs",
+ serialized_error: '"An error message"',
+ instantiation_error_class: "ArgumentError",
instantiation_error_message: "wrong number of arguments (given 1, expected 2)",
},
)
end
+ it "falls back to StandardError when the client can't initialize the error class when initialize doesn't take a string" do
+ allow(Temporal.logger).to receive(:error)
+
+ message = "An error message"
+ stack_trace = ["a fake backtrace"]
+ failure = Fabricate(
+ :api_application_failure,
+ message: message,
+ backtrace: stack_trace,
+ error_class: ErrorThatRaisesInInitialize.to_s,
+ )
+
+ e = Temporal::Workflow::Errors.generate_error(failure, converter)
+ expect(e).to be_a(StandardError)
+ expect(e.message).to eq("ErrorThatRaisesInInitialize: An error message")
+ expect(e.backtrace).to eq(stack_trace)
+ expect(Temporal.logger)
+ .to have_received(:error)
+ .with(
+ "Could not instantiate original error. Defaulting to StandardError. "\
+ "Make sure the worker running your activities is configured with use_error_serialization_v2. "\
+ "If so, make sure the original error serialized by searching your logs for 'unserializable_error'. "\
+ "If not, you're using legacy serialization, and it's likely that "\
+ "your error's initializer takes something other than exactly one positional argument.",
+ {
+ original_error: "ErrorThatRaisesInInitialize",
+ serialized_error: '"An error message"',
+ instantiation_error_class: "TypeError",
+ instantiation_error_message: "String can't be coerced into Integer",
+ },
+ )
+ end
end
end
diff --git a/spec/unit/lib/temporal/workflow/execution_info_spec.rb b/spec/unit/lib/temporal/workflow/execution_info_spec.rb
index fbac5ee0..6bef7b2d 100644
--- a/spec/unit/lib/temporal/workflow/execution_info_spec.rb
+++ b/spec/unit/lib/temporal/workflow/execution_info_spec.rb
@@ -1,8 +1,14 @@
require 'temporal/workflow/execution_info'
describe Temporal::Workflow::ExecutionInfo do
- subject { described_class.generate_from(api_info) }
- let(:api_info) { Fabricate(:api_workflow_execution_info) }
+ subject { described_class.generate_from(api_info, converter) }
+ let(:converter) do
+ Temporal::ConverterWrapper.new(
+ Temporal::Configuration::DEFAULT_CONVERTER,
+ Temporal::Configuration::DEFAULT_PAYLOAD_CODEC
+ )
+ end
+ let(:api_info) { Fabricate(:api_workflow_execution_info, workflow: 'TestWorkflow', workflow_id: '') }
describe '.generate_for' do
@@ -14,18 +20,29 @@
expect(subject.close_time).to be_a(Time)
expect(subject.status).to eq(:COMPLETED)
expect(subject.history_length).to eq(api_info.history_length)
+ expect(subject.memo).to eq({ 'foo' => 'bar' })
+ expect(subject.search_attributes).to eq({ 'foo' => 'bar' })
end
it 'freezes the info' do
expect(subject).to be_frozen
end
+
+ it 'deserializes if search_attributes is nil' do
+ api_info.search_attributes = nil
+
+ result = described_class.generate_from(api_info, converter)
+ expect(result.search_attributes).to eq({})
+ end
end
describe 'statuses' do
let(:api_info) do
Fabricate(
:api_workflow_execution_info,
- status: Temporal::Api::Enums::V1::WorkflowExecutionStatus::WORKFLOW_EXECUTION_STATUS_TERMINATED
+ workflow: 'TestWorkflow',
+ workflow_id: '',
+ status: Temporalio::Api::Enums::V1::WorkflowExecutionStatus::WORKFLOW_EXECUTION_STATUS_TERMINATED
)
end
@@ -50,4 +67,33 @@
expect(subject).not_to be_timed_out
end
end
+
+ describe '#closed?' do
+ Temporal::Workflow::Status::API_STATUS_MAP.keys.select { |x| x != :WORKFLOW_EXECUTION_STATUS_RUNNING }.each do |status|
+ context "when status is #{status}" do
+ let(:api_info) do
+ Fabricate(
+ :api_workflow_execution_info,
+ workflow: 'TestWorkflow',
+ workflow_id: '',
+ status: Temporalio::Api::Enums::V1::WorkflowExecutionStatus.resolve(status)
+ )
+ end
+ it { is_expected.to be_closed }
+ end
+ end
+
+ context "when status is RUNNING" do
+ let(:api_info) do
+ Fabricate(
+ :api_workflow_execution_info,
+ workflow: 'TestWorkflow',
+ workflow_id: '',
+ status: Temporalio::Api::Enums::V1::WorkflowExecutionStatus.resolve(:WORKFLOW_EXECUTION_STATUS_RUNNING)
+ )
+ end
+
+ it { is_expected.not_to be_closed }
+ end
+ end
end
diff --git a/spec/unit/lib/temporal/workflow/executor_spec.rb b/spec/unit/lib/temporal/workflow/executor_spec.rb
new file mode 100644
index 00000000..714dc72b
--- /dev/null
+++ b/spec/unit/lib/temporal/workflow/executor_spec.rb
@@ -0,0 +1,162 @@
+require 'temporal/middleware/chain'
+require 'temporal/workflow/executor'
+require 'temporal/workflow/history'
+require 'temporal/workflow'
+require 'temporal/workflow/task_processor'
+require 'temporal/workflow/query_registry'
+
+describe Temporal::Workflow::Executor do
+ subject { described_class.new(workflow, history, workflow_metadata, config, false, middleware_chain) }
+
+ let(:connection) { instance_double('Temporal::Connection::GRPC') }
+ let(:workflow_started_event) { Fabricate(:api_workflow_execution_started_event, event_id: 1) }
+ let(:history) do
+ Temporal::Workflow::History.new([
+ workflow_started_event,
+ Fabricate(:api_workflow_task_scheduled_event, event_id: 2),
+ Fabricate(:api_workflow_task_started_event, event_id: 3),
+ Fabricate(:api_workflow_task_completed_event, event_id: 4)
+ ])
+ end
+ let(:workflow) { TestWorkflow }
+ let(:workflow_metadata) { Fabricate(:workflow_metadata) }
+ let(:config) { Temporal::Configuration.new }
+ let(:middleware_chain) { Temporal::Middleware::Chain.new }
+
+ before do
+ allow(Temporal::Connection).to receive(:generate).and_return(connection)
+ end
+
+ class TestWorkflow < Temporal::Workflow
+ def execute
+ 'test'
+ end
+ end
+
+ describe '#run' do
+ it 'runs a workflow' do
+ allow(workflow).to receive(:execute_in_context).and_call_original
+ expect(middleware_chain).to receive(:invoke).and_call_original
+
+ subject.run
+
+ expect(workflow)
+ .to have_received(:execute_in_context)
+ .with(
+ an_instance_of(Temporal::Workflow::Context),
+ nil
+ )
+ end
+
+ it 'returns a complete workflow decision' do
+ decisions = subject.run
+
+ expect(decisions.commands.length).to eq(1)
+ expect(decisions.new_sdk_flags_used).to be_empty
+
+ decision_id, decision = decisions.commands.first
+ expect(decision_id).to eq(history.events.length + 1)
+ expect(decision).to be_an_instance_of(Temporal::Workflow::Command::CompleteWorkflow)
+ expect(decision.result).to eq('test')
+ end
+
+ context 'history with signal' do
+ let(:history) do
+ Temporal::Workflow::History.new([
+ workflow_started_event,
+ Fabricate(:api_workflow_execution_signaled_event, event_id: 2),
+ Fabricate(:api_workflow_task_scheduled_event, event_id: 3),
+ Fabricate(:api_workflow_task_started_event, event_id: 4)
+ ])
+ end
+ let(:system_info) { Fabricate(:api_get_system_info) }
+
+ context 'signals first config enabled' do
+ it 'set signals first sdk flag' do
+ allow(connection).to receive(:get_system_info).and_return(system_info)
+
+ decisions = subject.run
+
+ expect(decisions.commands.length).to eq(1)
+ expect(decisions.new_sdk_flags_used).to eq(
+ Set.new([
+ Temporal::Workflow::SDKFlags::SAVE_FIRST_TASK_SIGNALS
+ ]))
+ end
+ end
+
+ context 'signals first config disabled' do
+ let(:config) { Temporal::Configuration.new.tap { |c| c.legacy_signals = true } }
+ it 'no sdk flag' do
+ decisions = subject.run
+
+ expect(decisions.commands.length).to eq(1)
+ expect(decisions.new_sdk_flags_used).to be_empty
+ end
+ end
+ end
+
+ it 'generates workflow metadata' do
+ allow(Temporal::Metadata::Workflow).to receive(:new)
+ payload = Temporalio::Api::Common::V1::Payload.new(
+ metadata: { 'encoding' => 'json/plain' },
+ data: '"bar"'.b
+ )
+ header =
+ Google::Protobuf::Map.new(:string, :message, Temporalio::Api::Common::V1::Payload, { 'Foo' => payload })
+ workflow_started_event.workflow_execution_started_event_attributes.header =
+ Fabricate(:api_header, fields: header)
+
+ subject.run
+
+ event_attributes = workflow_started_event.workflow_execution_started_event_attributes
+ expect(Temporal::Metadata::Workflow)
+ .to have_received(:new)
+ .with(
+ namespace: workflow_metadata.namespace,
+ id: workflow_metadata.workflow_id,
+ name: event_attributes.workflow_type.name,
+ run_id: event_attributes.original_execution_run_id,
+ parent_id: nil,
+ parent_run_id: nil,
+ attempt: event_attributes.attempt,
+ task_queue: event_attributes.task_queue.name,
+ headers: { 'Foo' => 'bar' },
+ run_started_at: workflow_started_event.event_time.to_time,
+ memo: {}
+ )
+ end
+ end
+
+ describe '#process_queries' do
+ let(:query_registry) { Temporal::Workflow::QueryRegistry.new }
+ let(:query_1_result) { 42 }
+ let(:query_2_error) { StandardError.new('Test query failure') }
+ let(:queries) do
+ {
+ '1' => Temporal::Workflow::TaskProcessor::Query.new(Fabricate(:api_workflow_query, query_type: 'success'), config.converter),
+ '2' => Temporal::Workflow::TaskProcessor::Query.new(Fabricate(:api_workflow_query, query_type: 'failure'), config.converter),
+ '3' => Temporal::Workflow::TaskProcessor::Query.new(Fabricate(:api_workflow_query, query_type: 'unknown'), config.converter)
+ }
+ end
+
+ before do
+ allow(Temporal::Workflow::QueryRegistry).to receive(:new).and_return(query_registry)
+ query_registry.register('success') { query_1_result }
+ query_registry.register('failure') { raise query_2_error }
+ end
+
+ it 'returns query results' do
+ results = subject.process_queries(queries)
+
+ expect(results.length).to eq(3)
+ expect(results['1']).to be_a(Temporal::Workflow::QueryResult::Answer)
+ expect(results['1'].result).to eq(query_1_result)
+ expect(results['2']).to be_a(Temporal::Workflow::QueryResult::Failure)
+ expect(results['2'].error).to eq(query_2_error)
+ expect(results['3']).to be_a(Temporal::Workflow::QueryResult::Failure)
+ expect(results['3'].error).to be_a(Temporal::QueryFailed)
+ expect(results['3'].error.message).to eq("Workflow did not register a handler for 'unknown'. KnownQueryTypes=[success, failure]")
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/workflow/future_spec.rb b/spec/unit/lib/temporal/workflow/future_spec.rb
index 4fbc5b37..293a7d84 100644
--- a/spec/unit/lib/temporal/workflow/future_spec.rb
+++ b/spec/unit/lib/temporal/workflow/future_spec.rb
@@ -46,8 +46,8 @@
expect(subject.get).to be exception
end
- it 'calls context.wait_for if not finished' do
- allow(workflow_context).to receive(:wait_for).with(subject)
+ it 'calls context.wait_for_any if not finished' do
+ allow(workflow_context).to receive(:wait_for_any).with(subject)
subject.get
end
end
@@ -58,8 +58,8 @@
subject.wait
end
- it 'calls context.wait_for if not already done' do
- allow(workflow_context).to receive(:wait_for).with(subject)
+ it 'calls context.wait_for_any if not already done' do
+ allow(workflow_context).to receive(:wait_for_any).with(subject)
subject.wait
end
end
diff --git a/spec/unit/lib/temporal/workflow/history/event_target_spec.rb b/spec/unit/lib/temporal/workflow/history/event_target_spec.rb
index 717c4572..2f2c80bd 100644
--- a/spec/unit/lib/temporal/workflow/history/event_target_spec.rb
+++ b/spec/unit/lib/temporal/workflow/history/event_target_spec.rb
@@ -21,5 +21,21 @@
expect(subject.type).to eq(described_class::CANCEL_TIMER_REQUEST_TYPE)
end
end
+
+ context 'when event is ACTIVITY_CANCELED' do
+ let(:raw_event) { Fabricate(:api_activity_task_canceled_event) }
+
+ it 'sets type to activity' do
+ expect(subject.type).to eq(described_class::ACTIVITY_TYPE)
+ end
+ end
+
+ context 'when event is ACTIVITY_TASK_CANCEL_REQUESTED' do
+ let(:raw_event) { Fabricate(:api_activity_task_cancel_requested_event) }
+
+ it 'sets type to cancel_activity_request' do
+ expect(subject.type).to eq(described_class::CANCEL_ACTIVITY_REQUEST_TYPE)
+ end
+ end
end
end
diff --git a/spec/unit/lib/temporal/workflow/history_spec.rb b/spec/unit/lib/temporal/workflow/history_spec.rb
deleted file mode 100644
index 8a058fc1..00000000
--- a/spec/unit/lib/temporal/workflow/history_spec.rb
+++ /dev/null
@@ -1,7 +0,0 @@
-require 'temporal/workflow/history'
-
-describe Temporal::Workflow::History do
- describe '#next_window' do
-
- end
-end
diff --git a/spec/unit/lib/temporal/workflow/poller_spec.rb b/spec/unit/lib/temporal/workflow/poller_spec.rb
index 7f907f69..020e2e91 100644
--- a/spec/unit/lib/temporal/workflow/poller_spec.rb
+++ b/spec/unit/lib/temporal/workflow/poller_spec.rb
@@ -1,6 +1,7 @@
-require 'temporal/workflow/poller'
-require 'temporal/middleware/entry'
require 'temporal/configuration'
+require 'temporal/metric_keys'
+require 'temporal/middleware/entry'
+require 'temporal/workflow/poller'
describe Temporal::Workflow::Poller do
let(:connection) { instance_double('Temporal::Connection::GRPC') }
@@ -10,109 +11,171 @@
let(:config) { Temporal::Configuration.new }
let(:middleware_chain) { instance_double(Temporal::Middleware::Chain) }
let(:middleware) { [] }
+ let(:workflow_middleware_chain) { instance_double(Temporal::Middleware::Chain) }
+ let(:workflow_middleware) { [] }
+ let(:empty_middleware_chain) { instance_double(Temporal::Middleware::Chain) }
+ let(:binary_checksum) { 'v1.0.0' }
+ let(:busy_wait_delay) {0.01}
+
+ subject do
+ described_class.new(
+ namespace,
+ task_queue,
+ lookup,
+ config,
+ middleware,
+ workflow_middleware,
+ {
+ binary_checksum: binary_checksum
+ }
+ )
+ end
- subject { described_class.new(namespace, task_queue, lookup, config, middleware) }
+  # Stubs the connection so the poller receives `task` for the first `times:`
+  # polls and nil thereafter; the poller is then shut down before returning.
+ def poll(task, times: 1)
+ polled_times = 0
+ allow(connection).to receive(:poll_workflow_task_queue) do
+ polled_times += 1
+ if polled_times <= times
+ task
+ else
+ nil
+ end
+ end
+
+ subject.start
+
+ while polled_times < times
+ sleep(busy_wait_delay)
+ end
+ # stop poller before inspecting
+ subject.stop_polling; subject.wait
+ polled_times
+ end
before do
allow(Temporal::Connection).to receive(:generate).and_return(connection)
- allow(Temporal::Middleware::Chain).to receive(:new).and_return(middleware_chain)
+ allow(Temporal::Middleware::Chain).to receive(:new).with(workflow_middleware).and_return(workflow_middleware_chain)
+ allow(Temporal::Middleware::Chain).to receive(:new).with(middleware).and_return(middleware_chain)
+ allow(Temporal::Middleware::Chain).to receive(:new).with([]).and_return(empty_middleware_chain)
allow(Temporal.metrics).to receive(:timing)
+ allow(Temporal.metrics).to receive(:increment)
end
describe '#start' do
- it 'polls for decision tasks' do
- allow(subject).to receive(:shutting_down?).and_return(false, false, true)
- allow(connection).to receive(:poll_workflow_task_queue).and_return(nil)
-
+ it 'polls for workflow tasks' do
subject.start
-
- # stop poller before inspecting
- subject.stop_polling; subject.wait
-
- expect(connection)
- .to have_received(:poll_workflow_task_queue)
- .with(namespace: namespace, task_queue: task_queue)
- .twice
+ times = poll(nil, times: 2)
+ expect(times).to be >=(2)
end
it 'reports time since last poll' do
- allow(subject).to receive(:shutting_down?).and_return(false, false, true)
- allow(connection).to receive(:poll_workflow_task_queue).and_return(nil)
+ poll(nil)
- subject.start
+ expect(Temporal.metrics)
+ .to have_received(:timing)
+ .with(
+ Temporal::MetricKeys::WORKFLOW_POLLER_TIME_SINCE_LAST_POLL,
+ an_instance_of(Integer),
+ namespace: namespace,
+ task_queue: task_queue
+ )
+ .at_least(2).times
+ end
- # stop poller before inspecting
- subject.stop_polling; subject.wait
+ it 'reports polling completed with received_task false' do
+ poll(nil)
expect(Temporal.metrics)
- .to have_received(:timing)
+ .to have_received(:increment)
.with(
- 'workflow_poller.time_since_last_poll',
- an_instance_of(Fixnum),
+ Temporal::MetricKeys::WORKFLOW_POLLER_POLL_COMPLETED,
+ received_task: 'false',
namespace: namespace,
task_queue: task_queue
)
- .twice
+ .at_least(2).times
end
- context 'when an decision task is received' do
+ context 'when a workflow task is received' do
let(:task_processor) do
instance_double(Temporal::Workflow::TaskProcessor, process: nil)
end
let(:task) { Fabricate(:api_workflow_task) }
before do
- allow(subject).to receive(:shutting_down?).and_return(false, true)
- allow(connection).to receive(:poll_workflow_task_queue).and_return(task)
allow(Temporal::Workflow::TaskProcessor).to receive(:new).and_return(task_processor)
end
it 'uses TaskProcessor to process tasks' do
- subject.start
-
- # stop poller before inspecting
- subject.stop_polling; subject.wait
+ poll(task)
expect(Temporal::Workflow::TaskProcessor)
.to have_received(:new)
- .with(task, namespace, lookup, middleware_chain, config)
+ .with(task, task_queue, namespace, lookup, empty_middleware_chain, empty_middleware_chain, config, binary_checksum)
expect(task_processor).to have_received(:process)
end
+ it 'reports polling completed with received_task true' do
+ poll(task)
+
+ expect(Temporal.metrics)
+ .to have_received(:increment)
+ .with(
+ Temporal::MetricKeys::WORKFLOW_POLLER_POLL_COMPLETED,
+ received_task: 'true',
+ namespace: namespace,
+ task_queue: task_queue
+ )
+ .once
+ end
+
context 'with middleware configured' do
class TestPollerMiddleware
def initialize(_); end
+
def call(_); end
end
+ let(:workflow_middleware) { [entry_1] }
let(:middleware) { [entry_1, entry_2] }
let(:entry_1) { Temporal::Middleware::Entry.new(TestPollerMiddleware, '1') }
let(:entry_2) { Temporal::Middleware::Entry.new(TestPollerMiddleware, '2') }
- it 'initializes middleware chain and passes it down to TaskProcessor' do
- subject.start
- # stop poller before inspecting
- subject.stop_polling; subject.wait
+ it 'initializes middleware chain and passes it down to TaskProcessor' do
+ poll(task)
expect(Temporal::Middleware::Chain).to have_received(:new).with(middleware)
+ expect(Temporal::Middleware::Chain).to have_received(:new).with(workflow_middleware)
expect(Temporal::Workflow::TaskProcessor)
.to have_received(:new)
- .with(task, namespace, lookup, middleware_chain, config)
+ .with(task, task_queue, namespace, lookup, middleware_chain, workflow_middleware_chain, config, binary_checksum)
end
end
end
context 'when connection is unable to poll' do
before do
- allow(subject).to receive(:shutting_down?).and_return(false, true)
- allow(connection).to receive(:poll_workflow_task_queue).and_raise(StandardError)
+ allow(subject).to receive(:sleep).and_return(nil)
end
it 'logs' do
allow(Temporal.logger).to receive(:error)
+ polled = false
+ allow(connection).to receive(:poll_workflow_task_queue) do
+ if !polled
+ polled = true
+ raise StandardError
+ end
+ end
+
subject.start
+ while !polled
+ sleep(busy_wait_delay)
+ end
# stop poller before inspecting
subject.stop_polling; subject.wait
@@ -126,6 +189,67 @@ def call(_); end
error: '#'
)
end
+
+ it 'does not sleep' do
+ polled = false
+ allow(connection).to receive(:poll_workflow_task_queue) do
+ if !polled
+ polled = true
+ raise StandardError
+ end
+ end
+
+ subject.start
+ while !polled
+ sleep(busy_wait_delay)
+ end
+
+ # stop poller before inspecting
+ subject.stop_polling; subject.wait
+
+ expect(subject).to have_received(:sleep).with(0).once
+ end
+ end
+
+ context 'when connection is unable to poll and poll_retry_seconds is set' do
+ subject do
+ described_class.new(
+ namespace,
+ task_queue,
+ lookup,
+ config,
+ middleware,
+ workflow_middleware,
+ {
+ binary_checksum: binary_checksum,
+ poll_retry_seconds: 5
+ }
+ )
+ end
+
+ before do
+ allow(subject).to receive(:sleep).and_return(nil)
+ end
+
+ it 'sleeps' do
+ polled = false
+ allow(connection).to receive(:poll_workflow_task_queue) do
+ if !polled
+ polled = true
+ raise StandardError
+ end
+ end
+
+ subject.start
+ while !polled
+ sleep(busy_wait_delay)
+ end
+
+ # stop poller before inspecting
+ subject.stop_polling; subject.wait
+
+ expect(subject).to have_received(:sleep).with(5).once
+ end
end
end
end
diff --git a/spec/unit/lib/temporal/workflow/query_registry_spec.rb b/spec/unit/lib/temporal/workflow/query_registry_spec.rb
new file mode 100644
index 00000000..d65405fc
--- /dev/null
+++ b/spec/unit/lib/temporal/workflow/query_registry_spec.rb
@@ -0,0 +1,67 @@
+require 'temporal/workflow/query_registry'
+
+describe Temporal::Workflow::QueryRegistry do
+ subject { described_class.new }
+
+ describe '#register' do
+ let(:handler) { Proc.new {} }
+
+ it 'registers a query handler' do
+ subject.register('test-query', &handler)
+
+ expect(subject.send(:handlers)['test-query']).to eq(handler)
+ end
+
+ context 'when query handler is already registered' do
+ let(:handler_2) { Proc.new {} }
+
+ before { subject.register('test-query', &handler) }
+
+ it 'warns' do
+ allow(subject).to receive(:warn)
+
+ subject.register('test-query', &handler_2)
+
+ expect(subject)
+ .to have_received(:warn)
+ .with('[NOTICE] Overwriting a query handler for test-query')
+ end
+
+ it 're-registers a query handler' do
+ subject.register('test-query', &handler_2)
+
+ expect(subject.send(:handlers)['test-query']).to eq(handler_2)
+ end
+ end
+ end
+
+ describe '#handle' do
+ context 'when a query handler has been registered' do
+ let(:handler) { Proc.new { 42 } }
+
+ before { subject.register('test-query', &handler) }
+
+ it 'runs the handler and returns the result' do
+ expect(subject.handle('test-query')).to eq(42)
+ end
+ end
+
+ context 'when a query handler has been registered with args' do
+ let(:handler) { Proc.new { |arg_1, arg_2| arg_1 + arg_2 } }
+
+ before { subject.register('test-query', &handler) }
+
+ it 'runs the handler and returns the result' do
+ expect(subject.handle('test-query', [3, 5])).to eq(8)
+ end
+ end
+
+ context 'when a query handler has not been registered' do
+ it 'raises' do
+ expect do
+ subject.handle('test-query')
+ end.to raise_error(Temporal::QueryFailed, "Workflow did not register a handler for 'test-query'. KnownQueryTypes=[]")
+ end
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/workflow/query_result_spec.rb b/spec/unit/lib/temporal/workflow/query_result_spec.rb
new file mode 100644
index 00000000..222446a8
--- /dev/null
+++ b/spec/unit/lib/temporal/workflow/query_result_spec.rb
@@ -0,0 +1,25 @@
+require 'temporal/workflow/query_result'
+
+describe Temporal::Workflow::QueryResult do
+ describe '.answer' do
+    it 'returns an answer query result' do
+ result = described_class.answer(42)
+
+ expect(result).to be_a(Temporal::Workflow::QueryResult::Answer)
+ expect(result).to be_frozen
+ expect(result.result).to eq(42)
+ end
+ end
+
+ describe '.failure' do
+ let(:error) { StandardError.new('Test query failure') }
+
+ it 'returns a failure query result' do
+ result = described_class.failure(error)
+
+ expect(result).to be_a(Temporal::Workflow::QueryResult::Failure)
+ expect(result).to be_frozen
+ expect(result.error).to eq(error)
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/workflow/stack_trace_tracker_spec.rb b/spec/unit/lib/temporal/workflow/stack_trace_tracker_spec.rb
new file mode 100644
index 00000000..5db23fb7
--- /dev/null
+++ b/spec/unit/lib/temporal/workflow/stack_trace_tracker_spec.rb
@@ -0,0 +1,56 @@
+require 'temporal/workflow/stack_trace_tracker'
+
+describe Temporal::Workflow::StackTraceTracker do
+ subject { described_class.new }
+ describe '#to_s' do
+ def record_function
+ subject.record
+ end
+
+ def record_and_clear_function
+ subject.record
+ subject.clear
+ end
+
+ def record_two_function
+ subject.record
+
+ Fiber.new do
+ subject.record
+ end.resume
+ end
+
+ it 'starts empty' do
+ expect(subject.to_s).to eq("Fiber count: 0\n")
+ end
+
+ it 'one fiber' do
+ record_function
+ stack_trace = subject.to_s
+ expect(stack_trace).to start_with("Fiber count: 1\n\n")
+
+ first_stack_line = stack_trace.split("\n")[2]
+ expect(first_stack_line).to include("record_function")
+ end
+
+ it 'one fiber cleared' do
+ record_and_clear_function
+ stack_trace = subject.to_s
+ expect(stack_trace).to start_with("Fiber count: 0\n")
+ end
+
+ it 'two fibers' do
+ record_two_function
+ output = subject.to_s
+ expect(output).to start_with("Fiber count: 2\n\n")
+
+ stack_traces = output.split("\n\n")
+
+ first_stack = stack_traces[1]
+ expect(first_stack).to include("record_two_function")
+
+ second_stack = stack_traces[2]
+ expect(second_stack).to include("block in record_two_function")
+ end
+ end
+end
\ No newline at end of file
diff --git a/spec/unit/lib/temporal/workflow/state_manager_spec.rb b/spec/unit/lib/temporal/workflow/state_manager_spec.rb
new file mode 100644
index 00000000..8aa8f9aa
--- /dev/null
+++ b/spec/unit/lib/temporal/workflow/state_manager_spec.rb
@@ -0,0 +1,666 @@
+require 'temporal/workflow'
+require 'temporal/workflow/dispatcher'
+require 'temporal/workflow/history/event'
+require 'temporal/workflow/history/window'
+require 'temporal/workflow/signal'
+require 'temporal/workflow/state_manager'
+require 'temporal/errors'
+
+describe Temporal::Workflow::StateManager do
+ describe '#schedule' do
+ class MyWorkflow < Temporal::Workflow; end
+
+ # These are all "terminal" commands
+ [
+ Temporal::Workflow::Command::ContinueAsNew.new(
+ workflow_type: MyWorkflow,
+ task_queue: 'dummy'
+ ),
+ Temporal::Workflow::Command::FailWorkflow.new(
+ exception: StandardError.new('dummy')
+ ),
+ Temporal::Workflow::Command::CompleteWorkflow.new(
+ result: 5
+ )
+ ].each do |terminal_command|
+ it "fails to validate if #{terminal_command.class} is not the last command scheduled" do
+ state_manager = described_class.new(Temporal::Workflow::Dispatcher.new, Temporal::Configuration.new)
+
+ next_command = Temporal::Workflow::Command::RecordMarker.new(
+ name: Temporal::Workflow::StateManager::RELEASE_MARKER,
+ details: 'dummy'
+ )
+
+ state_manager.schedule(terminal_command)
+ expect do
+ state_manager.schedule(next_command)
+ end.to raise_error(Temporal::WorkflowAlreadyCompletingError)
+ end
+ end
+ end
+
+ describe '#apply' do
+ let(:dispatcher) { Temporal::Workflow::Dispatcher.new }
+ let(:state_manager) do
+ Temporal::Workflow::StateManager.new(dispatcher, config)
+ end
+ let(:config) { Temporal::Configuration.new }
+ let(:connection) { instance_double('Temporal::Connection::GRPC') }
+ let(:system_info) { Fabricate(:api_get_system_info) }
+
+ before do
+ allow(Temporal::Connection).to receive(:generate).and_return(connection)
+ end
+
+ context 'workflow execution started' do
+ let(:history) do
+ Temporal::Workflow::History.new([Fabricate(:api_workflow_execution_started_event, event_id: 1)])
+ end
+
+ it 'dispatcher invoked for start' do
+ expect(dispatcher).to receive(:dispatch).with(
+ Temporal::Workflow::History::EventTarget.start_workflow, 'started', instance_of(Array)
+ ).once
+ state_manager.apply(history.next_window)
+ end
+ end
+
+ context 'workflow execution started with signal' do
+ let(:signal_entry) { Fabricate(:api_workflow_execution_signaled_event, event_id: 2) }
+ let(:history) do
+ Temporal::Workflow::History.new(
+ [
+ Fabricate(:api_workflow_execution_started_event, event_id: 1),
+ signal_entry
+ ]
+ )
+ end
+
+ it 'dispatcher invoked for start' do
+ allow(connection).to receive(:get_system_info).and_return(system_info)
+
+ expect(dispatcher).to receive(:dispatch).with(
+ Temporal::Workflow::Signal.new(signal_entry.workflow_execution_signaled_event_attributes.signal_name),
+ 'signaled',
+ [
+ signal_entry.workflow_execution_signaled_event_attributes.signal_name,
+ signal_entry.workflow_execution_signaled_event_attributes.input
+ ]
+ ).once.ordered
+ expect(dispatcher).to receive(:dispatch).with(
+ Temporal::Workflow::History::EventTarget.start_workflow, 'started', instance_of(Array)
+ ).once.ordered
+
+ state_manager.apply(history.next_window)
+ end
+ end
+
+ context 'workflow execution started with signal, replaying without flag' do
+ let(:signal_entry) { Fabricate(:api_workflow_execution_signaled_event, event_id: 2) }
+ let(:history) do
+ Temporal::Workflow::History.new(
+ [
+ Fabricate(:api_workflow_execution_started_event, event_id: 1),
+ signal_entry,
+ Fabricate(:api_workflow_task_scheduled_event, event_id: 3),
+ Fabricate(:api_workflow_task_started_event, event_id: 4),
+ Fabricate(
+ :api_workflow_task_completed_event,
+ event_id: 5,
+ sdk_flags: sdk_flags
+ )
+ ]
+ )
+ end
+
+ context 'replaying without HANDLE_SIGNALS_FIRST sdk flag' do
+ let(:sdk_flags) { [] }
+ it 'dispatcher invokes start before signal' do
+ allow(connection).to receive(:get_system_info).and_return(system_info)
+
+ expect(dispatcher).to receive(:dispatch).with(
+ Temporal::Workflow::History::EventTarget.start_workflow, 'started', instance_of(Array)
+ ).once.ordered
+ expect(dispatcher).to receive(:dispatch).with(
+ Temporal::Workflow::Signal.new(signal_entry.workflow_execution_signaled_event_attributes.signal_name),
+ 'signaled',
+ [
+ signal_entry.workflow_execution_signaled_event_attributes.signal_name,
+ signal_entry.workflow_execution_signaled_event_attributes.input
+ ]
+ ).once.ordered
+
+ state_manager.apply(history.next_window)
+ end
+ end
+
+ context 'replaying without SAVE_FIRST_TASK_SIGNALS sdk flag' do
+ let(:sdk_flags) { [Temporal::Workflow::SDKFlags::HANDLE_SIGNALS_FIRST] }
+ it 'dispatcher invokes start before signal' do
+ allow(connection).to receive(:get_system_info).and_return(system_info)
+
+ expect(dispatcher).to receive(:dispatch).with(
+ Temporal::Workflow::History::EventTarget.start_workflow, 'started', instance_of(Array)
+ ).once.ordered
+ expect(dispatcher).to receive(:dispatch).with(
+ Temporal::Workflow::Signal.new(signal_entry.workflow_execution_signaled_event_attributes.signal_name),
+ 'signaled',
+ [
+ signal_entry.workflow_execution_signaled_event_attributes.signal_name,
+ signal_entry.workflow_execution_signaled_event_attributes.input
+ ]
+ ).once.ordered
+
+ state_manager.apply(history.next_window)
+ end
+ end
+
+ context 'replaying with SAVE_FIRST_TASK_SIGNALS sdk flag' do
+ let(:sdk_flags) do [
+ Temporal::Workflow::SDKFlags::HANDLE_SIGNALS_FIRST,
+ Temporal::Workflow::SDKFlags::SAVE_FIRST_TASK_SIGNALS
+ ]
+ end
+ it 'dispatcher invokes signal before start' do
+ allow(connection).to receive(:get_system_info).and_return(system_info)
+
+ expect(dispatcher).to receive(:dispatch).with(
+ Temporal::Workflow::Signal.new(signal_entry.workflow_execution_signaled_event_attributes.signal_name),
+ 'signaled',
+ [
+ signal_entry.workflow_execution_signaled_event_attributes.signal_name,
+ signal_entry.workflow_execution_signaled_event_attributes.input
+ ]
+ ).once.ordered
+ expect(dispatcher).to receive(:dispatch).with(
+ Temporal::Workflow::History::EventTarget.start_workflow, 'started', instance_of(Array)
+ ).once.ordered
+
+ state_manager.apply(history.next_window)
+ end
+ end
+ end
+
+ context 'with a marker' do
+ let(:activity_entry) { Fabricate(:api_activity_task_scheduled_event, event_id: 5) }
+ let(:marker_entry) { Fabricate(:api_marker_recorded_event, event_id: 8) }
+ let(:history) do
+ Temporal::Workflow::History.new(
+ [
+ Fabricate(:api_workflow_execution_started_event, event_id: 1),
+ Fabricate(:api_workflow_task_scheduled_event, event_id: 2),
+ Fabricate(:api_workflow_task_started_event, event_id: 3),
+ Fabricate(:api_workflow_task_completed_event, event_id: 4),
+ activity_entry,
+ Fabricate(:api_activity_task_started_event, event_id: 6),
+ Fabricate(:api_activity_task_completed_event, event_id: 7),
+ marker_entry,
+ Fabricate(:api_workflow_task_scheduled_event, event_id: 9),
+ Fabricate(:api_workflow_task_started_event, event_id: 10),
+ Fabricate(:api_workflow_task_completed_event, event_id: 11)
+ ]
+ )
+ end
+
+ it 'marker handled first' do
+ activity_target = nil
+ dispatcher.register_handler(Temporal::Workflow::History::EventTarget.start_workflow, 'started') do
+ activity_target, = state_manager.schedule(
+ Temporal::Workflow::Command::ScheduleActivity.new(
+ activity_id: activity_entry.event_id,
+ activity_type: activity_entry.activity_task_scheduled_event_attributes.activity_type,
+ input: nil,
+ task_queue: activity_entry.activity_task_scheduled_event_attributes.task_queue,
+ retry_policy: nil,
+ timeouts: nil,
+ headers: nil
+ )
+ )
+ end
+
+ # First task: starts workflow execution, schedules an activity
+ state_manager.apply(history.next_window)
+
+ expect(activity_target).not_to be_nil
+
+ activity_completed = false
+ dispatcher.register_handler(activity_target, 'completed') do
+ activity_completed = true
+ state_manager.schedule(
+ Temporal::Workflow::Command::RecordMarker.new(
+ name: marker_entry.marker_recorded_event_attributes.marker_name,
+ details: TEST_CONVERTER.to_payload_map({})
+ )
+ )
+
+ # Activity completed event comes before marker recorded event in history, but
+ # when activity completion is handled, the marker has already been handled.
+ expect(state_manager.send(:marker_ids).count).to eq(1)
+ end
+
+ # Second task: Handles activity completion, records marker
+ state_manager.apply(history.next_window)
+
+ expect(activity_completed).to eq(true)
+ end
+ end
+
+ def test_order(signal_first)
+ activity_target = nil
+ signaled = false
+
+ dispatcher.register_handler(Temporal::Workflow::History::EventTarget.start_workflow, 'started') do
+ activity_target, = state_manager.schedule(
+ Temporal::Workflow::Command::ScheduleActivity.new(
+ activity_id: activity_entry.event_id,
+ activity_type: activity_entry.activity_task_scheduled_event_attributes.activity_type,
+ input: nil,
+ task_queue: activity_entry.activity_task_scheduled_event_attributes.task_queue,
+ retry_policy: nil,
+ timeouts: nil,
+ headers: nil
+ )
+ )
+ end
+
+ dispatcher.register_handler(
+ Temporal::Workflow::Signal.new(
+ signal_entry.workflow_execution_signaled_event_attributes.signal_name
+ ),
+ 'signaled'
+ ) do
+ signaled = true
+ end
+
+ # First task: starts workflow execution, schedules an activity
+ state_manager.apply(history.next_window)
+
+ expect(activity_target).not_to be_nil
+ expect(signaled).to eq(false)
+
+ activity_completed = false
+ dispatcher.register_handler(activity_target, 'completed') do
+ activity_completed = true
+
+ expect(signaled).to eq(signal_first)
+ end
+
+ # Second task: Handles activity completion, signal
+ state_manager.apply(history.next_window)
+
+ expect(activity_completed).to eq(true)
+ expect(signaled).to eq(true)
+ end
+
+ context 'replaying with a signal' do
+ let(:activity_entry) { Fabricate(:api_activity_task_scheduled_event, event_id: 5) }
+ let(:signal_entry) { Fabricate(:api_workflow_execution_signaled_event, event_id: 8) }
+ let(:signal_handling_task) { Fabricate(:api_workflow_task_completed_event, event_id: 11) }
+ let(:history) do
+ Temporal::Workflow::History.new(
+ [
+ Fabricate(:api_workflow_execution_started_event, event_id: 1),
+ Fabricate(:api_workflow_task_scheduled_event, event_id: 2),
+ Fabricate(:api_workflow_task_started_event, event_id: 3),
+ Fabricate(:api_workflow_task_completed_event, event_id: 4),
+ activity_entry,
+ Fabricate(:api_activity_task_started_event, event_id: 6),
+ Fabricate(:api_activity_task_completed_event, event_id: 7),
+ signal_entry,
+ Fabricate(:api_workflow_task_scheduled_event, event_id: 9),
+ Fabricate(:api_workflow_task_started_event, event_id: 10),
+ signal_handling_task
+ ]
+ )
+ end
+
+ context 'no SDK flag' do
+ it 'signal inline' do
+ test_order(false)
+ end
+ end
+
+ context 'with SDK flag' do
+ let(:signal_handling_task) do
+ Fabricate(
+ :api_workflow_task_completed_event,
+ event_id: 11,
+ sdk_flags: [Temporal::Workflow::SDKFlags::HANDLE_SIGNALS_FIRST]
+ )
+ end
+ it 'signal first' do
+ allow(connection).to receive(:get_system_info).and_return(system_info)
+
+ test_order(true)
+ end
+
+ context 'even with legacy config enabled' do
+ let(:config) { Temporal::Configuration.new.tap { |c| c.legacy_signals = true } }
+ it 'signal first' do
+ allow(connection).to receive(:get_system_info).and_return(system_info)
+
+ test_order(true)
+ end
+ end
+ end
+ end
+
+ context 'not replaying with a signal' do
+ let(:activity_entry) { Fabricate(:api_activity_task_scheduled_event, event_id: 5) }
+ let(:signal_entry) { Fabricate(:api_workflow_execution_signaled_event, event_id: 8) }
+ let(:history) do
+ Temporal::Workflow::History.new(
+ [
+ Fabricate(:api_workflow_execution_started_event, event_id: 1),
+ Fabricate(:api_workflow_task_scheduled_event, event_id: 2),
+ Fabricate(:api_workflow_task_started_event, event_id: 3),
+ Fabricate(:api_workflow_task_completed_event, event_id: 4),
+ activity_entry,
+ Fabricate(:api_activity_task_started_event, event_id: 6),
+ Fabricate(:api_activity_task_completed_event, event_id: 7),
+ signal_entry,
+ Fabricate(:api_workflow_task_scheduled_event, event_id: 9)
+ ]
+ )
+ end
+
+ context 'signals first config disabled' do
+ let(:config) { Temporal::Configuration.new.tap { |c| c.legacy_signals = true } }
+ it 'signal inline' do
+ test_order(false)
+
+ expect(state_manager.new_sdk_flags_used).to be_empty
+ end
+ end
+
+ context 'signals first with default config' do
+ let(:config) { Temporal::Configuration.new }
+
+ it 'signal first' do
+ allow(connection).to receive(:get_system_info).and_return(system_info)
+
+ test_order(true)
+
+ expect(state_manager.new_sdk_flags_used).to eq(Set.new([Temporal::Workflow::SDKFlags::HANDLE_SIGNALS_FIRST]))
+ end
+ end
+ end
+
+ context 'not replaying with a signal in the first workflow task' do
+ let(:signal_entry) { Fabricate(:api_workflow_execution_signaled_event, event_id: 2) }
+ let(:history) do
+ Temporal::Workflow::History.new(
+ [
+ Fabricate(:api_workflow_execution_started_event, event_id: 1),
+ signal_entry,
+ Fabricate(:api_workflow_task_scheduled_event, event_id: 3)
+ ]
+ )
+ end
+
+ def test_order_one_task(*expected_sdk_flags)
+ allow(connection).to receive(:get_system_info).and_return(system_info)
+ signaled = false
+
+ dispatcher.register_handler(
+ Temporal::Workflow::Signal.new(
+ signal_entry.workflow_execution_signaled_event_attributes.signal_name
+ ),
+ 'signaled'
+ ) do
+ signaled = true
+ end
+
+ state_manager.apply(history.next_window)
+ expect(state_manager.new_sdk_flags_used).to eq(Set.new(expected_sdk_flags))
+ expect(signaled).to eq(true)
+ end
+
+ context 'default config' do
+ let(:config) { Temporal::Configuration.new }
+
+ it 'signal first' do
+ test_order_one_task(
+ Temporal::Workflow::SDKFlags::SAVE_FIRST_TASK_SIGNALS
+ )
+ end
+ end
+
+ context 'signals in first task disabled' do
+ let(:config) { Temporal::Configuration.new.tap { |c| c.no_signals_in_first_task = true } }
+ it 'signal inline' do
+ test_order_one_task(Temporal::Workflow::SDKFlags::HANDLE_SIGNALS_FIRST)
+ end
+ end
+ end
+ end
+
+ describe '#history_size' do
+ let(:config) { Temporal::Configuration.new }
+ let(:history_size_bytes) { 27 }
+ let(:suggest_continue_as_new) { true }
+ let(:start_workflow_execution_event) { Fabricate(:api_workflow_execution_started_event) }
+ let(:workflow_task_scheduled_event) { Fabricate(:api_workflow_task_scheduled_event, event_id: 2) }
+ let(:workflow_task_started_event) do
+ Fabricate(
+ :api_workflow_task_started_event,
+ event_id: 3,
+ history_size_bytes: history_size_bytes,
+ suggest_continue_as_new: suggest_continue_as_new)
+ end
+
+ it 'has correct event count' do
+ state_manager = described_class.new(Temporal::Workflow::Dispatcher.new, config)
+
+ window = Temporal::Workflow::History::Window.new
+ window.add(Temporal::Workflow::History::Event.new(start_workflow_execution_event))
+ window.add(Temporal::Workflow::History::Event.new(workflow_task_scheduled_event))
+ window.add(Temporal::Workflow::History::Event.new(workflow_task_started_event))
+
+ state_manager.apply(window)
+
+ expect(state_manager.history_size).to eq(
+ Temporal::Workflow::History::Size.new(
+ events: 4, # comes from event id of started + 1
+ bytes: history_size_bytes,
+ suggest_continue_as_new: suggest_continue_as_new
+ )
+ )
+ end
+ end
+
+ describe "#final_commands" do
+ let(:dispatcher) { Temporal::Workflow::Dispatcher.new }
+ let(:state_manager) do
+ Temporal::Workflow::StateManager.new(dispatcher, config)
+ end
+
+ let(:config) { Temporal::Configuration.new }
+
+ it "preserves canceled activity or timer commands when not completed" do
+ schedule_activity_command = Temporal::Workflow::Command::ScheduleActivity.new
+ state_manager.schedule(schedule_activity_command)
+
+ start_timer_command = Temporal::Workflow::Command::StartTimer.new
+ state_manager.schedule(start_timer_command)
+
+ cancel_activity_command = Temporal::Workflow::Command::RequestActivityCancellation.new(
+ activity_id: schedule_activity_command.activity_id
+ )
+ state_manager.schedule(cancel_activity_command)
+
+ cancel_timer_command = Temporal::Workflow::Command::CancelTimer.new(
+ timer_id: start_timer_command.timer_id
+ )
+ state_manager.schedule(cancel_timer_command)
+
+ expect(state_manager.final_commands).to(
+ eq(
+ [
+ [1, schedule_activity_command],
+ [2, start_timer_command],
+ [3, cancel_activity_command],
+ [4, cancel_timer_command]
+ ]
+ )
+ )
+ end
+
+ it "drop cancel activity command when completed" do
+ schedule_activity_command = Temporal::Workflow::Command::ScheduleActivity.new
+ state_manager.schedule(schedule_activity_command)
+
+ cancel_command = Temporal::Workflow::Command::RequestActivityCancellation.new(
+ activity_id: schedule_activity_command.activity_id
+ )
+ state_manager.schedule(cancel_command)
+
+ # Fake completing the activity
+ window = Temporal::Workflow::History::Window.new
+ # The fake assumes an activity event completed two events ago, so fix the event id to +2
+ window.add(
+ Temporal::Workflow::History::Event.new(
+ Fabricate(:api_activity_task_completed_event, event_id: schedule_activity_command.activity_id + 2)
+ )
+ )
+ state_manager.apply(window)
+
+ expect(state_manager.final_commands).to(eq([[1, schedule_activity_command]]))
+ end
+
+ it "drop cancel timer command when completed" do
+ start_timer_command = Temporal::Workflow::Command::StartTimer.new
+ state_manager.schedule(start_timer_command)
+
+ cancel_command = Temporal::Workflow::Command::CancelTimer.new(
+ timer_id: start_timer_command.timer_id
+ )
+ state_manager.schedule(cancel_command)
+
+ # Fake completing the timer
+ window = Temporal::Workflow::History::Window.new
+      # The fake assumes a timer fired event four events ago, so fix the event id to +4
+ window.add(
+ Temporal::Workflow::History::Event.new(
+ Fabricate(:api_timer_fired_event, event_id: start_timer_command.timer_id + 4)
+ )
+ )
+ state_manager.apply(window)
+
+ expect(state_manager.final_commands).to(eq([[1, start_timer_command]]))
+ end
+ end
+
+
+ describe '#search_attributes' do
+ let(:initial_search_attributes) do
+ {
+ 'CustomAttribute1' => 42,
+ 'CustomAttribute2' => 10
+ }
+ end
+ let(:start_workflow_execution_event) do
+ Fabricate(:api_workflow_execution_started_event, search_attributes: initial_search_attributes)
+ end
+ let(:start_workflow_execution_event_no_search_attributes) do
+ Fabricate(:api_workflow_execution_started_event)
+ end
+ let(:workflow_task_started_event) { Fabricate(:api_workflow_task_started_event, event_id: 2) }
+ let(:upserted_attributes_1) do
+ {
+ 'CustomAttribute3' => 'foo',
+ 'CustomAttribute2' => 8
+ }
+ end
+ let(:upsert_search_attribute_event_1) do
+ Fabricate(:api_upsert_search_attributes_event, search_attributes: upserted_attributes_1)
+ end
+ let(:upserted_attributes_2) do
+ {
+ 'CustomAttribute3' => 'bar',
+ 'CustomAttribute4' => 10
+ }
+ end
+ let(:upsert_search_attribute_event_2) do
+ Fabricate(:api_upsert_search_attributes_event,
+ event_id: 4,
+ search_attributes: upserted_attributes_2)
+ end
+ let(:upsert_empty_search_attributes_event) do
+ Fabricate(:api_upsert_search_attributes_event, search_attributes: {})
+ end
+
+ it 'initial merges with upserted' do
+ state_manager = described_class.new(Temporal::Workflow::Dispatcher.new, Temporal::Configuration.new)
+
+ window = Temporal::Workflow::History::Window.new
+ window.add(Temporal::Workflow::History::Event.new(start_workflow_execution_event))
+ window.add(Temporal::Workflow::History::Event.new(upsert_search_attribute_event_1))
+
+ command = Temporal::Workflow::Command::UpsertSearchAttributes.new(
+ search_attributes: upserted_attributes_1
+ )
+
+ state_manager.schedule(command)
+ # Attributes from command are applied immediately, then merged when
+ # history window is replayed below. This ensures newly upserted
+ # search attributes are available immediately in workflow code.
+ expect(state_manager.search_attributes).to eq(upserted_attributes_1)
+
+ state_manager.apply(window)
+
+ expect(state_manager.search_attributes).to eq(
+ {
+ 'CustomAttribute1' => 42, # from initial (not overridden)
+ 'CustomAttribute2' => 8, # only from upsert
+ 'CustomAttribute3' => 'foo' # overridden by upsert
+ }
+ )
+ end
+
+ it 'initial and upsert treated as empty hash' do
+ state_manager = described_class.new(Temporal::Workflow::Dispatcher.new, Temporal::Configuration.new)
+
+ window = Temporal::Workflow::History::Window.new
+ window.add(Temporal::Workflow::History::Event.new(start_workflow_execution_event_no_search_attributes))
+ window.add(Temporal::Workflow::History::Event.new(upsert_empty_search_attributes_event))
+
+ command = Temporal::Workflow::Command::UpsertSearchAttributes.new(search_attributes: {})
+ expect(state_manager.search_attributes).to eq({})
+
+ state_manager.schedule(command)
+ state_manager.apply(window)
+
+ expect(state_manager.search_attributes).to eq({})
+ end
+
+ it 'multiple upserts merge' do
+ state_manager = described_class.new(Temporal::Workflow::Dispatcher.new, Temporal::Configuration.new)
+
+ window_1 = Temporal::Workflow::History::Window.new
+ window_1.add(Temporal::Workflow::History::Event.new(workflow_task_started_event))
+ window_1.add(Temporal::Workflow::History::Event.new(upsert_search_attribute_event_1))
+
+ command_1 = Temporal::Workflow::Command::UpsertSearchAttributes.new(search_attributes: upserted_attributes_1)
+ state_manager.schedule(command_1)
+ state_manager.apply(window_1)
+
+ expect(state_manager.search_attributes).to eq(upserted_attributes_1)
+
+ window_2 = Temporal::Workflow::History::Window.new
+ window_2.add(Temporal::Workflow::History::Event.new(upsert_search_attribute_event_2))
+
+ command_2 = Temporal::Workflow::Command::UpsertSearchAttributes.new(search_attributes: upserted_attributes_2)
+ state_manager.schedule(command_2)
+ state_manager.apply(window_2)
+
+ expect(state_manager.search_attributes).to eq(
+ {
+ 'CustomAttribute2' => 8,
+ 'CustomAttribute3' => 'bar',
+ 'CustomAttribute4' => 10
+ }
+ )
+ end
+ end
+end
diff --git a/spec/unit/lib/temporal/workflow/task_processor_spec.rb b/spec/unit/lib/temporal/workflow/task_processor_spec.rb
index d1537bcc..6ad3c12c 100644
--- a/spec/unit/lib/temporal/workflow/task_processor_spec.rb
+++ b/spec/unit/lib/temporal/workflow/task_processor_spec.rb
@@ -1,19 +1,29 @@
-require 'temporal/workflow/task_processor'
-require 'temporal/middleware/chain'
require 'temporal/configuration'
+require 'temporal/metric_keys'
+require 'temporal/middleware/chain'
+require 'temporal/workflow/task_processor'
describe Temporal::Workflow::TaskProcessor do
- subject { described_class.new(task, namespace, lookup, middleware_chain, config) }
+ subject do
+ described_class.new(task, task_queue, namespace, lookup, middleware_chain, workflow_middleware_chain, config, binary_checksum)
+ end
let(:namespace) { 'test-namespace' }
+ let(:task_queue) { 'test-queue' }
let(:lookup) { instance_double('Temporal::ExecutableLookup', find: nil) }
- let(:task) { Fabricate(:api_workflow_task, workflow_type: api_workflow_type) }
+ let(:query) { nil }
+ let(:queries) { nil }
+ let(:task) do
+ Fabricate(:api_workflow_task, { workflow_type: api_workflow_type, query: query, queries: queries }.compact)
+ end
let(:api_workflow_type) { Fabricate(:api_workflow_type, name: workflow_name) }
let(:workflow_name) { 'TestWorkflow' }
let(:connection) { instance_double('Temporal::Connection::GRPC') }
let(:middleware_chain) { Temporal::Middleware::Chain.new }
- let(:input) { ['arg1', 'arg2'] }
+ let(:workflow_middleware_chain) { Temporal::Middleware::Chain.new }
+ let(:input) { %w[arg1 arg2] }
let(:config) { Temporal::Configuration.new }
+ let(:binary_checksum) { 'v1.0.0' }
describe '#process' do
let(:context) { instance_double('Temporal::Workflow::Context') }
@@ -24,11 +34,13 @@
.with(config.for_connection)
.and_return(connection)
allow(connection).to receive(:respond_workflow_task_completed)
+ allow(connection).to receive(:respond_query_task_completed)
allow(connection).to receive(:respond_workflow_task_failed)
allow(middleware_chain).to receive(:invoke).and_call_original
allow(Temporal.metrics).to receive(:timing)
+ allow(Temporal.metrics).to receive(:increment)
end
context 'when workflow is not registered' do
@@ -44,7 +56,7 @@
reported_error = nil
reported_metadata = nil
- Temporal.configuration.on_error do |error, metadata: nil|
+ config.on_error do |error, metadata: nil|
reported_error = error
reported_metadata = metadata
end
@@ -54,21 +66,40 @@
expect(reported_error).to be_an_instance_of(Temporal::WorkflowNotRegistered)
expect(reported_metadata).to be_an_instance_of(Temporal::Metadata::WorkflowTask)
end
+
+ it 'emits workflow task failure metric' do
+ subject.process
+
+ expect(Temporal.metrics)
+ .to have_received(:increment)
+ .with(
+ Temporal::MetricKeys::WORKFLOW_TASK_EXECUTION_FAILED,
+ hash_including({
+ workflow: workflow_name,
+ namespace: namespace
+ })
+ )
+ end
end
context 'when workflow is registered' do
let(:workflow_class) { double('Temporal::Workflow', execute_in_context: nil) }
let(:executor) { double('Temporal::Workflow::Executor') }
let(:commands) { double('commands') }
+ let(:new_sdk_flags_used) { double('new_sdk_flags_used') }
+ let(:run_result) do
+ Temporal::Workflow::Executor::RunResult.new(commands: commands, new_sdk_flags_used: new_sdk_flags_used)
+ end
before do
allow(lookup).to receive(:find).with(workflow_name).and_return(workflow_class)
allow(Temporal::Workflow::Executor).to receive(:new).and_return(executor)
- allow(executor).to receive(:run) { workflow_class.execute_in_context(context, input); commands }
+ allow(executor).to receive(:run) { workflow_class.execute_in_context(context, input) }.and_return(run_result)
+ allow(executor).to receive(:process_queries)
end
context 'when workflow task completes' do
- # Note: This is a bit of a pointless test because I short circuit this with stubs.
+ # NOTE: This is a bit of a pointless test because I short circuit this with stubs.
# The code does not drop down into the state machine and so forth.
it 'runs the specified task' do
subject.process
@@ -84,20 +115,87 @@
)
end
- it 'completes the workflow task' do
- subject.process
+ context 'when workflow task queries are included' do
+ let(:query_id) { SecureRandom.uuid }
+ let(:query_result) { Temporal::Workflow::QueryResult.answer(42) }
+ let(:queries) do
+ Google::Protobuf::Map.new(:string, :message, Temporalio::Api::Query::V1::WorkflowQuery).tap do |map|
+ map[query_id] = Fabricate(:api_workflow_query)
+ end
+ end
- expect(connection)
- .to have_received(:respond_workflow_task_completed)
- .with(task_token: task.task_token, commands: commands)
+ before do
+ allow(executor).to receive(:process_queries).and_return(query_id => query_result)
+ end
+
+ it 'completes the workflow task with query results' do
+ subject.process
+
+ expect(executor)
+ .to have_received(:process_queries)
+ .with(query_id => an_instance_of(Temporal::Workflow::TaskProcessor::Query))
+ expect(connection)
+ .to have_received(:respond_workflow_task_completed)
+ .with(
+ namespace: namespace,
+ task_token: task.task_token,
+ commands: commands,
+ binary_checksum: binary_checksum,
+ query_results: { query_id => query_result },
+ new_sdk_flags_used: new_sdk_flags_used
+ )
+ end
end
- it 'ignores connection exception' do
- allow(connection)
- .to receive(:respond_workflow_task_completed)
- .and_raise(StandardError)
+ context 'when deprecated task query is present' do
+ let(:query) { Fabricate(:api_workflow_query) }
+ let(:result) { Temporal::Workflow::QueryResult.answer(42) }
- subject.process
+ before do
+ allow(executor).to receive(:process_queries).and_return(legacy_query: result)
+ end
+
+ it 'completes the workflow query task with the result' do
+ subject.process
+
+ expect(executor).to have_received(:process_queries).with(
+ legacy_query: an_instance_of(Temporal::Workflow::TaskProcessor::Query)
+ )
+ expect(connection).to_not have_received(:respond_workflow_task_completed)
+ expect(connection)
+ .to have_received(:respond_query_task_completed)
+ .with(
+ task_token: task.task_token,
+ namespace: namespace,
+ query_result: result
+ )
+ end
+ end
+
+ context 'when deprecated task query is not present' do
+ it 'completes the workflow task' do
+ subject.process
+
+ expect(connection).to_not have_received(:respond_query_task_completed)
+ expect(connection)
+ .to have_received(:respond_workflow_task_completed)
+ .with(
+ namespace: namespace,
+ task_token: task.task_token,
+ commands: commands,
+ query_results: nil,
+ binary_checksum: binary_checksum,
+ new_sdk_flags_used: new_sdk_flags_used
+ )
+ end
+
+ it 'ignores connection exception' do
+ allow(connection)
+ .to receive(:respond_workflow_task_completed)
+ .and_raise(StandardError)
+
+ subject.process
+ end
end
it 'sends queue_time metric' do
@@ -105,7 +203,14 @@
expect(Temporal.metrics)
.to have_received(:timing)
- .with('workflow_task.queue_time', an_instance_of(Integer), workflow: workflow_name)
+ .with(
+ Temporal::MetricKeys::WORKFLOW_TASK_QUEUE_TIME,
+ an_instance_of(Integer),
+ hash_including({
+ workflow: workflow_name,
+ namespace: namespace
+ })
+ )
end
it 'sends latency metric' do
@@ -113,7 +218,14 @@
expect(Temporal.metrics)
.to have_received(:timing)
- .with('workflow_task.latency', an_instance_of(Integer), workflow: workflow_name)
+ .with(
+ Temporal::MetricKeys::WORKFLOW_TASK_LATENCY,
+ an_instance_of(Integer),
+ hash_including({
+ workflow: workflow_name,
+ namespace: namespace
+ })
+ )
end
end
@@ -122,47 +234,83 @@
before { allow(workflow_class).to receive(:execute_in_context).and_raise(exception) }
- it 'fails the workflow task' do
- subject.process
+ context 'when deprecated task query is present' do
+ let(:query) { Fabricate(:api_workflow_query) }
- expect(connection)
- .to have_received(:respond_workflow_task_failed)
- .with(
- task_token: task.task_token,
- cause: Temporal::Api::Enums::V1::WorkflowTaskFailedCause::WORKFLOW_TASK_FAILED_CAUSE_WORKFLOW_WORKER_UNHANDLED_FAILURE,
- exception: exception
- )
- end
+ it 'fails the workflow task' do
+ subject.process
- it 'does not fail the task beyond the first attempt' do
- task.attempt = 2
- subject.process
+ expect(connection)
+ .to have_received(:respond_workflow_task_failed)
+ .with(
+ namespace: namespace,
+ task_token: task.task_token,
+ cause: Temporalio::Api::Enums::V1::WorkflowTaskFailedCause::WORKFLOW_TASK_FAILED_CAUSE_WORKFLOW_WORKER_UNHANDLED_FAILURE,
+ exception: exception,
+ binary_checksum: binary_checksum
+ )
+ end
- expect(connection)
- .not_to have_received(:respond_workflow_task_failed)
+ it 'emits workflow task failure metric' do
+ subject.process
+
+ expect(Temporal.metrics)
+ .to have_received(:increment)
+ .with(
+ Temporal::MetricKeys::WORKFLOW_TASK_EXECUTION_FAILED,
+ hash_including({
+ workflow: workflow_name,
+ namespace: namespace
+ })
+ )
+ end
end
- it 'ignores connection exception' do
- allow(connection)
- .to receive(:respond_workflow_task_failed)
- .and_raise(StandardError)
+ context 'when deprecated task query is not present' do
+ it 'fails the workflow task' do
+ subject.process
- subject.process
- end
+ expect(connection)
+ .to have_received(:respond_workflow_task_failed)
+ .with(
+ namespace: namespace,
+ task_token: task.task_token,
+ cause: Temporalio::Api::Enums::V1::WorkflowTaskFailedCause::WORKFLOW_TASK_FAILED_CAUSE_WORKFLOW_WORKER_UNHANDLED_FAILURE,
+ exception: exception,
+ binary_checksum: binary_checksum
+ )
+ end
+
+ it 'does not fail the task beyond the first attempt' do
+ task.attempt = 2
+ subject.process
- it 'calls error_handlers' do
- reported_error = nil
- reported_metadata = nil
+ expect(connection)
+ .not_to have_received(:respond_workflow_task_failed)
+ end
+
+ it 'ignores connection exception' do
+ allow(connection)
+ .to receive(:respond_workflow_task_failed)
+ .and_raise(StandardError)
- Temporal.configuration.on_error do |error, metadata: nil|
- reported_error = error
- reported_metadata = metadata
+ subject.process
end
- subject.process
+ it 'calls error_handlers' do
+ reported_error = nil
+ reported_metadata = nil
- expect(reported_error).to be_an_instance_of(StandardError)
- expect(reported_metadata).to be_an_instance_of(Temporal::Metadata::WorkflowTask)
+ config.on_error do |error, metadata: nil|
+ reported_error = error
+ reported_metadata = metadata
+ end
+
+ subject.process
+
+ expect(reported_error).to be_an_instance_of(StandardError)
+ expect(reported_metadata).to be_an_instance_of(Temporal::Metadata::WorkflowTask)
+ end
end
it 'sends queue_time metric' do
@@ -170,7 +318,14 @@
expect(Temporal.metrics)
.to have_received(:timing)
- .with('workflow_task.queue_time', an_instance_of(Integer), workflow: workflow_name)
+ .with(
+ Temporal::MetricKeys::WORKFLOW_TASK_QUEUE_TIME,
+ an_instance_of(Integer),
+ hash_including({
+ workflow: workflow_name,
+ namespace: namespace
+ })
+ )
end
it 'sends latency metric' do
@@ -178,22 +333,53 @@
expect(Temporal.metrics)
.to have_received(:timing)
- .with('workflow_task.latency', an_instance_of(Integer), workflow: workflow_name)
+ .with(
+ Temporal::MetricKeys::WORKFLOW_TASK_LATENCY,
+ an_instance_of(Integer),
+ hash_including({
+ workflow: workflow_name,
+ namespace: namespace
+ })
+ )
+ end
+ end
+
+ context 'when legacy query fails' do
+ let(:query) { Fabricate(:api_workflow_query) }
+ let(:exception) { StandardError.new('workflow task failed') }
+ let(:query_failure) { Temporal::Workflow::QueryResult.failure(exception) }
+
+ before do
+ allow(executor)
+ .to receive(:process_queries)
+ .and_return(legacy_query: query_failure)
+ end
+
+ it 'fails the workflow task' do
+ subject.process
+
+ expect(connection)
+ .to have_received(:respond_query_task_completed)
+ .with(
+ namespace: namespace,
+ task_token: task.task_token,
+ query_result: query_failure
+ )
end
end
context 'when history is paginated' do
- let(:task) { Fabricate(:api_paginated_workflow_task, workflow_type: api_workflow_type) }
+ let(:page_one) { 'page-1' }
+ let(:task) { Fabricate(:api_workflow_task, workflow_type: api_workflow_type, next_page_token: page_one) }
let(:event) { Fabricate(:api_workflow_execution_started_event) }
- let(:history_response) { Fabricate(:workflow_execution_history, events: [event]) }
- before do
+ it 'fetches additional pages' do
+ history_response = Fabricate(:workflow_execution_history, events: [event])
+
allow(connection)
.to receive(:get_workflow_execution_history)
.and_return(history_response)
- end
- it 'fetches additional pages' do
subject.process
expect(connection)
@@ -207,19 +393,62 @@
.once
end
+ # Temporal server sometimes sends empty history pages but with a next_page_token. Best practice, used
+ # across the various SDKs, is to keep paginating.
context 'when a page has no events' do
- let(:history_response) { Fabricate(:workflow_execution_history, events: []) }
+ let(:page_two) { 'page-2' }
+ let(:page_three) { 'page-3' }
+ let(:first_history_response) do
+ Fabricate(:workflow_execution_history, events: [event], _next_page_token: page_two)
+ end
- it 'fails a workflow task' do
- subject.process
+ let(:empty_history_response) do
+ Fabricate(:workflow_execution_history, events: [], _next_page_token: page_three)
+ end
+ let(:final_event) { Fabricate(:api_workflow_execution_completed_event) }
+ let(:final_history_response) do
+ Fabricate(:workflow_execution_history, events: [final_event])
+ end
- expect(connection)
- .to have_received(:respond_workflow_task_failed)
+ it 'continues asking for the next workflow task and populates all the events in the history' do
+ # page_one: [event], -> page_two
+ # page_two: [], -> page_three
+ # page_three: [final_event]
+ allow(connection)
+ .to receive(:get_workflow_execution_history)
.with(
- task_token: task.task_token,
- cause: Temporal::Api::Enums::V1::WorkflowTaskFailedCause::WORKFLOW_TASK_FAILED_CAUSE_WORKFLOW_WORKER_UNHANDLED_FAILURE,
- exception: an_instance_of(Temporal::UnexpectedResponse)
+ namespace: namespace,
+ workflow_id: task.workflow_execution.workflow_id,
+ run_id: task.workflow_execution.run_id,
+ next_page_token: page_one
)
+ .and_return(first_history_response)
+
+ allow(connection)
+ .to receive(:get_workflow_execution_history)
+ .with(
+ namespace: namespace,
+ workflow_id: task.workflow_execution.workflow_id,
+ run_id: task.workflow_execution.run_id,
+ next_page_token: page_two
+ )
+ .and_return(empty_history_response)
+
+ allow(connection)
+ .to receive(:get_workflow_execution_history)
+ .with(
+ namespace: namespace,
+ workflow_id: task.workflow_execution.workflow_id,
+ run_id: task.workflow_execution.run_id,
+ next_page_token: page_three
+ )
+ .and_return(final_history_response)
+
+ allow(Temporal::Workflow::History).to receive(:new)
+
+ subject.process
+
+ expect(Temporal::Workflow::History).to have_received(:new).with([event, final_event])
end
end
end
diff --git a/spec/unit/lib/temporal/workflow_spec.rb b/spec/unit/lib/temporal/workflow_spec.rb
index b8f6af5f..fb8cc32a 100644
--- a/spec/unit/lib/temporal/workflow_spec.rb
+++ b/spec/unit/lib/temporal/workflow_spec.rb
@@ -1,6 +1,78 @@
require 'temporal/workflow'
+require 'temporal/workflow/context'
require 'shared_examples/an_executable'
describe Temporal::Workflow do
it_behaves_like 'an executable'
+
+ class ArgsWorkflow < Temporal::Workflow
+ def execute(a)
+ 'args result'
+ end
+ end
+
+ class KwargsWorkflow < Temporal::Workflow
+ def execute(a, b:, c:)
+ 'kwargs result'
+ end
+ end
+
+ subject { described_class.new(ctx) }
+ let(:ctx) { instance_double('Temporal::Workflow::Context') }
+
+ before do
+ allow(ctx).to receive(:completed?).and_return(true)
+ end
+
+ describe '.execute_in_context' do
+ subject { ArgsWorkflow.new(ctx) }
+
+ let(:input) { ['test'] }
+
+ before do
+ allow(described_class).to receive(:new).and_return(subject)
+ end
+
+ it 'passes the context' do
+ described_class.execute_in_context(ctx, input)
+
+ expect(described_class).to have_received(:new).with(ctx)
+ end
+
+ it 'calls #execute' do
+ expect(subject).to receive(:execute).with(*input)
+
+ described_class.execute_in_context(ctx, input)
+ end
+
+ context 'when using keyword arguments' do
+ subject { KwargsWorkflow.new(ctx) }
+
+ let(:input) { ['test', { b: 'b', c: 'c' }] }
+
+ it 'passes the context' do
+ described_class.execute_in_context(ctx, input)
+
+ expect(described_class).to have_received(:new).with(ctx)
+ end
+
+ it 'calls #execute' do
+ expect(subject).to receive(:execute).with('test', b: 'b', c: 'c')
+
+ described_class.execute_in_context(ctx, input)
+ end
+
+ it 'does not raise an ArgumentError' do
+ expect {
+ described_class.execute_in_context(ctx, input)
+ }.not_to raise_error
+ end
+ end
+ end
+
+ describe '#execute' do
+ it 'is not implemented on a superclass' do
+ expect { subject.execute }.to raise_error(NotImplementedError)
+ end
+ end
end
diff --git a/spec/unit/lib/temporal_spec.rb b/spec/unit/lib/temporal_spec.rb
index 0d9daefd..49e57664 100644
--- a/spec/unit/lib/temporal_spec.rb
+++ b/spec/unit/lib/temporal_spec.rb
@@ -28,6 +28,10 @@
describe '.register_namespace' do
it_behaves_like 'a forwarded method', :register_namespace, 'test-namespace', 'This is a test namespace'
end
+
+ describe '.describe_namespace' do
+ it_behaves_like 'a forwarded method', :describe_namespace, 'test-namespace'
+ end
describe '.signal_workflow' do
it_behaves_like 'a forwarded method', :signal_workflow, 'TestWorkflow', 'TST_SIGNAL', 'x', 'y'
@@ -63,19 +67,24 @@
it 'calls a block with the configuration' do
expect do |block|
described_class.configure(&block)
- end.to yield_with_args(described_class.configuration)
+ end.to yield_with_args(described_class.send(:config))
end
end
describe '.configuration' do
+ before { allow(described_class).to receive(:warn) }
+
it 'returns Temporal::Configuration object' do
expect(described_class.configuration).to be_an_instance_of(Temporal::Configuration)
+ expect(described_class)
+ .to have_received(:warn)
+ .with('[DEPRECATION] This method is now deprecated without a substitution')
end
end
describe '.logger' do
it 'returns preconfigured Temporal logger' do
- expect(described_class.logger).to eq(described_class.configuration.logger)
+ expect(described_class.logger).to eq(described_class.send(:config).logger)
end
end
diff --git a/temporal.gemspec b/temporal.gemspec
index 59ef1192..3cc624ab 100644
--- a/temporal.gemspec
+++ b/temporal.gemspec
@@ -14,11 +14,14 @@ Gem::Specification.new do |spec|
spec.require_paths = ['lib']
spec.files = Dir["{lib,rbi}/**/*.*"] + %w(temporal.gemspec Gemfile LICENSE README.md)
+ spec.add_dependency 'base64'
spec.add_dependency 'grpc'
spec.add_dependency 'oj'
spec.add_development_dependency 'pry'
- spec.add_development_dependency 'rspec'
+ # TODO: Investigate spec failure surfacing in RSpec 3.11
+ spec.add_development_dependency 'rspec', '~> 3.10.0'
spec.add_development_dependency 'fabrication'
spec.add_development_dependency 'grpc-tools'
+ spec.add_development_dependency 'yard'
end