Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions common/testing/mocksdk/client_mock.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions common/testing/mocksdk/generate.sh
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@ mockgen -package "$GOPACKAGE" go.temporal.io/sdk/client Client | \
mockgen -package "$GOPACKAGE" go.temporal.io/sdk/worker Worker | \
sed -e 's,internal.RegisterWorkflowOptions,workflow.RegisterOptions,g' \
-e 's,internal.RegisterActivityOptions,activity.RegisterOptions,g' \
-e 's,internal.DynamicRegisterWorkflowOptions,workflow.DynamicRegisterOptions,g' \
-e 's,internal.DynamicRegisterActivityOptions,activity.DynamicRegisterOptions,g' \
-e 's,internal "go.temporal.io/sdk/internal",activity "go.temporal.io/sdk/activity"\n\tworkflow "go.temporal.io/sdk/workflow",' | \
goimports > worker_mock.go

Expand Down
24 changes: 24 additions & 0 deletions common/testing/mocksdk/worker_mock.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

9 changes: 9 additions & 0 deletions common/testing/testvars/test_vars.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import (
taskqueuepb "go.temporal.io/api/taskqueue/v1"
updatepb "go.temporal.io/api/update/v1"
workflowpb "go.temporal.io/api/workflow/v1"
"go.temporal.io/sdk/worker"
deploymentspb "go.temporal.io/server/api/deployment/v1"
persistencespb "go.temporal.io/server/api/persistence/v1"
"go.temporal.io/server/common/namespace"
Expand Down Expand Up @@ -254,6 +255,14 @@ func (tv *TestVars) ExternalDeploymentVersion() *deploymentpb.WorkerDeploymentVe
}
}

// SDKDeploymentVersion returns this TestVars' deployment version expressed
// as the SDK's worker.WorkerDeploymentVersion type (deployment name plus build id).
func (tv *TestVars) SDKDeploymentVersion() worker.WorkerDeploymentVersion {
	version := worker.WorkerDeploymentVersion{
		DeploymentName: tv.DeploymentSeries(),
		BuildId:        tv.BuildID(),
	}
	return version
}

// DeploymentVersionString returns v31 string
func (tv *TestVars) DeploymentVersionString() string {
return worker_versioning.WorkerDeploymentVersionToStringV31(tv.DeploymentVersion())
Expand Down
2 changes: 1 addition & 1 deletion go.mod
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ require (
go.opentelemetry.io/otel/sdk/metric v1.34.0
go.opentelemetry.io/otel/trace v1.34.0
go.temporal.io/api v1.52.0
go.temporal.io/sdk v1.34.0
go.temporal.io/sdk v1.35.0
go.uber.org/automaxprocs v1.6.0
go.uber.org/fx v1.23.0
go.uber.org/mock v0.5.0
Expand Down
4 changes: 2 additions & 2 deletions go.sum
Original file line number Diff line number Diff line change
Expand Up @@ -401,8 +401,8 @@ go.opentelemetry.io/proto/otlp v1.5.0 h1:xJvq7gMzB31/d406fB8U5CBdyQGw4P399D1aQWU
go.opentelemetry.io/proto/otlp v1.5.0/go.mod h1:keN8WnHxOy8PG0rQZjJJ5A2ebUoafqWp0eVQ4yIXvJ4=
go.temporal.io/api v1.52.0 h1:Tn69z2nhQeXtofa1/j/MbwPHnFRM9+13xqYmFl/KFjM=
go.temporal.io/api v1.52.0/go.mod h1:iaxoP/9OXMJcQkETTECfwYq4cw/bj4nwov8b3ZLVnXM=
go.temporal.io/sdk v1.34.0 h1:VLg/h6ny7GvLFVoQPqz2NcC93V9yXboQwblkRvZ1cZE=
go.temporal.io/sdk v1.34.0/go.mod h1:iE4U5vFrH3asOhqpBBphpj9zNtw8btp8+MSaf5A0D3w=
go.temporal.io/sdk v1.35.0 h1:lRNAQ5As9rLgYa7HBvnmKyzxLcdElTuoFJ0FXM/AsLQ=
go.temporal.io/sdk v1.35.0/go.mod h1:1q5MuLc2MEJ4lneZTHJzpVebW2oZnyxoIOWX3oFVebw=
go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
# To use it, run a local server (any backend) and ensure the following dynamic configs are enabled
# in the dynamic config file (config/dynamicconfig/development-sql.yaml):
#
# system.enableDeploymentVersions=true
# matching.PollerHistoryTTL=1s
# matching.wv.VersionDrainageStatusVisibilityGracePeriod=5s
# matching.wv.VersionDrainageStatusRefreshInterval=5s
Expand All @@ -18,7 +17,7 @@ deploymentName="foo"
version="1.0"

# Expected workflow counts - users can override these if their changes are expected to generate more workflows which will be true when a breaking change to
# these worfklows is introduced.
# these workflows is introduced.
# These values are used by the replay tester to validate that your workflow changes haven't accidentally created additional executions.
EXPECTED_DEPLOYMENT_WORKFLOWS=${EXPECTED_DEPLOYMENT_WORKFLOWS:-12}
EXPECTED_VERSION_WORKFLOWS=${EXPECTED_VERSION_WORKFLOWS:-14}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Expected workflow counts for replay testing
# Generated by generate_history.sh on Tue Aug 19 18:08:43 PDT 2025
EXPECTED_DEPLOYMENT_WORKFLOWS=12
EXPECTED_VERSION_WORKFLOWS=14
ACTUAL_DEPLOYMENT_WORKFLOWS=12
ACTUAL_VERSION_WORKFLOWS=14
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
94 changes: 66 additions & 28 deletions service/worker/workerdeployment/replaytester/worker/worker.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package main

import (
"context"
"fmt"
"log"
"time"

Expand All @@ -20,18 +21,23 @@ func main() {
defer c.Close()

deploymentName := "foo"
build1 := "1.0"
v1 := worker.WorkerDeploymentVersion{
DeploymentName: deploymentName,
BuildId: build1,
}
w1 := worker.New(c, "hello-world", worker.Options{
DeploymentOptions: worker.DeploymentOptions{
UseVersioning: true,
Version: deploymentName + ".1.0",
Version: v1,
DefaultVersioningBehavior: workflow.VersioningBehaviorPinned,
},
})

w2 := worker.New(c, "hello-world-2", worker.Options{
DeploymentOptions: worker.DeploymentOptions{
UseVersioning: true,
Version: deploymentName + ".1.0",
Version: v1,
DefaultVersioningBehavior: workflow.VersioningBehaviorPinned,
},
})
Expand Down Expand Up @@ -65,7 +71,7 @@ func main() {

// Update version metadata
_, err = dHandle.UpdateVersionMetadata(context.Background(), client.WorkerDeploymentUpdateVersionMetadataOptions{
Version: deploymentName + ".1.0",
Version: v1,
MetadataUpdate: client.WorkerDeploymentMetadataUpdate{
UpsertEntries: map[string]interface{}{
"key": "value",
Expand All @@ -78,63 +84,66 @@ func main() {

// Set ramping version to 1.0
_, err = dHandle.SetRampingVersion(context.Background(), client.WorkerDeploymentSetRampingVersionOptions{
Version: deploymentName + ".1.0",
BuildID: build1,
Percentage: 1,
})
if err != nil {
log.Fatalln("Unable to set ramping version", err)
}
verifyDeployment(dHandle, "__unversioned__", deploymentName+".1.0", client.WorkerDeploymentVersionDrainageStatusUnspecified)
verifyDeployment(dHandle, "", build1, 1, client.WorkerDeploymentVersionDrainageStatusUnspecified)

// Unset the ramping version
// Set the ramp percent to 0
// TODO(carlydf): Once we allow it, test setting ramping version to nil while current version is also nil
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

v1.35.0 of the SDK removes the ability to "unset" / "remove" the ramping version, you can only set it to nil (which now means unversioned) with a ramp percent of 0 for the same effect as what we previously called "unset."

We don't allow the ramping version of a worker deployment to be the same as the current version, so when the current version is nil, it is not allowed to "unset" the ramping version by setting it to nil, 0%

#8172 fixes that issue (customers have requested to be able to have current version == ramping version == nil when ramp percent is 0, so that they can reset their Worker Deployment to "unversioned" settings)

_, err = dHandle.SetRampingVersion(context.Background(), client.WorkerDeploymentSetRampingVersionOptions{
Version: "",
BuildID: build1,
Percentage: 0,
})
if err != nil {
log.Fatalln("Unable to unset ramping version", err)
log.Fatalln("Unable to set ramping version to zero", err)
}
verifyDeployment(dHandle, "__unversioned__", "", client.WorkerDeploymentVersionDrainageStatusDraining)
verifyDeployment(dHandle, "", build1, 0, client.WorkerDeploymentVersionDrainageStatusUnspecified)

// Set current version to 1.0
_, err = dHandle.SetCurrentVersion(context.Background(), client.WorkerDeploymentSetCurrentVersionOptions{
Version: deploymentName + ".1.0",
BuildID: build1,
IgnoreMissingTaskQueues: true,
})
if err != nil {
log.Fatalln("Unable to set current version", err)
}
verifyDeployment(dHandle, deploymentName+".1.0", "", client.WorkerDeploymentVersionDrainageStatusUnspecified)
verifyDeployment(dHandle, build1, "", 0, client.WorkerDeploymentVersionDrainageStatusUnspecified)

// Ramp the "__unversioned__" version
_, err = dHandle.SetRampingVersion(context.Background(), client.WorkerDeploymentSetRampingVersionOptions{
Version: "__unversioned__",
BuildID: "",
Percentage: 20,
IgnoreMissingTaskQueues: true,
})
if err != nil {
log.Fatalln("Unable to set ramping version", err)
}
verifyDeployment(dHandle, deploymentName+".1.0", "__unversioned__", client.WorkerDeploymentVersionDrainageStatusUnspecified)
verifyDeployment(dHandle, build1, "", 20, client.WorkerDeploymentVersionDrainageStatusUnspecified)

// Set current version to "__unversioned__"
_, err = dHandle.SetCurrentVersion(context.Background(), client.WorkerDeploymentSetCurrentVersionOptions{
Version: "__unversioned__",
BuildID: "",
IgnoreMissingTaskQueues: true,
})
if err != nil {
log.Fatalln("Unable to set current version", err)
}
verifyDeployment(dHandle, "__unversioned__", "", client.WorkerDeploymentVersionDrainageStatusDraining)
verifyDeployment(dHandle, "", "", 0, client.WorkerDeploymentVersionDrainageStatusDraining)

// Simulating a scenario when a drained version is reactivated and then re-deactivated.

// Waiting for the version 1.0 to become drained.
time.Sleep(8 * time.Second)
// Make sure 1.0 is drained.
verifyDeployment(dHandle, "__unversioned__", "", client.WorkerDeploymentVersionDrainageStatusDrained)
verifyDeployment(dHandle, "", "", 0, client.WorkerDeploymentVersionDrainageStatusDrained)

// Rollback a drained version 1.0, so that it is the current version for this deployment
_, err = dHandle.SetCurrentVersion(context.Background(), client.WorkerDeploymentSetCurrentVersionOptions{
Version: deploymentName + ".1.0",
BuildID: build1,
IgnoreMissingTaskQueues: true,
})
if err != nil {
Expand All @@ -144,7 +153,7 @@ func main() {
// Set current version to "__unversioned__" again so that version 1.0 can start draining. This replicates the
// scenario where a rolled back version is now draining.
_, err = dHandle.SetCurrentVersion(context.Background(), client.WorkerDeploymentSetCurrentVersionOptions{
Version: "__unversioned__",
BuildID: "",
IgnoreMissingTaskQueues: true,
})
if err != nil {
Expand All @@ -154,7 +163,7 @@ func main() {
// Waiting for the version 1.0 to become drained.
time.Sleep(8 * time.Second)
// Make sure 1.0 is drained.
verifyDeployment(dHandle, "__unversioned__", "", client.WorkerDeploymentVersionDrainageStatusDrained)
verifyDeployment(dHandle, "", "", 0, client.WorkerDeploymentVersionDrainageStatusDrained)

// Stopping both workers
w1.Stop()
Expand All @@ -165,7 +174,7 @@ func main() {

// Delete the deployment version
_, err = dHandle.DeleteVersion(context.Background(), client.WorkerDeploymentDeleteVersionOptions{
Version: deploymentName + ".1.0",
BuildID: build1,
SkipDrainage: true,
})
if err != nil {
Expand All @@ -184,23 +193,52 @@ func main() {

//nolint:revive
func verifyDeployment(dHandle client.WorkerDeploymentHandle,
expectedCurrentVersion string,
expectedRampingVersion string,
expectedCurrentVersionBuildId string,
expectedRampingVersionBuildId string,
expectedRampPercentage float32,
expectedDrainageStatus client.WorkerDeploymentVersionDrainageStatus,
) {
describeResponse, err := dHandle.Describe(context.Background(), client.WorkerDeploymentDescribeOptions{})
if err != nil {
log.Fatalln("Unable to describe deployment", err)
}
if describeResponse.Info.RoutingConfig.CurrentVersion != expectedCurrentVersion {
log.Fatalln("Current version is not ", expectedCurrentVersion)
if cv := describeResponse.Info.RoutingConfig.CurrentVersion; cv != nil {
if cv.BuildId != expectedCurrentVersionBuildId {
log.Fatalln(fmt.Sprintf("Current version build id is %s not %s", cv.BuildId, expectedCurrentVersionBuildId))
}
} else {
if expectedCurrentVersionBuildId != "" {
log.Fatalln("Current version is empty, expected build id ", expectedCurrentVersionBuildId)
}
}

if rv := describeResponse.Info.RoutingConfig.RampingVersion; rv != nil {
if rv.BuildId != expectedRampingVersionBuildId {
log.Fatalln(fmt.Sprintf("Ramping version build id is %s not %s", rv.BuildId, expectedRampingVersionBuildId))
}
} else {
if expectedRampingVersionBuildId != "" {
log.Fatalln("Ramping version is empty, expected build id ", expectedRampingVersionBuildId)
}
}

if rp := describeResponse.Info.RoutingConfig.RampingVersionPercentage; rp != expectedRampPercentage {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

improved error messages here only because I ran into some dumb errors and wanted better messages

log.Fatalln(fmt.Sprintf("Ramping percent is %v, expected %v", rp, expectedRampPercentage))
}

if describeResponse.Info.RoutingConfig.RampingVersion != expectedRampingVersion {
log.Fatalln("Ramping version is not ", expectedRampingVersion)
if ds := describeResponse.Info.VersionSummaries[0].DrainageStatus; ds != expectedDrainageStatus {
log.Fatalln(fmt.Sprintf("Drainage status is %v, not %v", drainageStatusString(ds), drainageStatusString(expectedDrainageStatus)))
}
}

if describeResponse.Info.VersionSummaries[0].DrainageStatus != expectedDrainageStatus {
log.Fatalln("Drainage status is not ", expectedDrainageStatus)
// drainageStatusString renders a worker deployment version drainage status as a
// human-readable label for log messages; unrecognized values become "Unknown".
func drainageStatusString(ds client.WorkerDeploymentVersionDrainageStatus) string {
	switch ds {
	case client.WorkerDeploymentVersionDrainageStatusUnspecified:
		return "Unspecified"
	case client.WorkerDeploymentVersionDrainageStatusDraining:
		return "Draining"
	case client.WorkerDeploymentVersionDrainageStatusDrained:
		return "Drained"
	default:
		return "Unknown"
	}
}
Loading
Loading