Skip to content

Commit 71752f5

Browse files
committed
add tests
1 parent 570a6a5 commit 71752f5

File tree

7 files changed

+172
-65
lines changed

7 files changed

+172
-65
lines changed
Lines changed: 110 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,110 @@
1+
//go:build !race
2+
// +build !race
3+
4+
// Copyright (c) HashiCorp, Inc.
5+
// SPDX-License-Identifier: BUSL-1.1
6+
7+
package command
8+
9+
import (
10+
"encoding/json"
11+
"path"
12+
"strings"
13+
"testing"
14+
"time"
15+
16+
"github.com/hashicorp/cli"
17+
testing_command "github.com/hashicorp/terraform/internal/command/testing"
18+
"github.com/hashicorp/terraform/internal/command/views"
19+
"github.com/hashicorp/terraform/internal/terminal"
20+
"github.com/zclconf/go-cty/cty"
21+
)
22+
23+
// The test contains a data race due to the disabling of the provider lock.
24+
// The provider lock was disabled, so that we can measure the true duration of the
25+
// test operation. Without disabling the provider lock, runs may block each other
26+
// when working with the provider, which does not happen by default in the real-world.
27+
func TestTest_ParallelJSON(t *testing.T) {
28+
td := t.TempDir()
29+
testCopyDir(t, testFixturePath(path.Join("test", "parallel")), td)
30+
defer testChdir(t, td)()
31+
32+
provider := testing_command.NewProvider(&testing_command.ResourceStore{
33+
Data: make(map[string]cty.Value),
34+
Nolock: true,
35+
})
36+
providerSource, close := newMockProviderSource(t, map[string][]string{
37+
"test": {"1.0.0"},
38+
})
39+
defer close()
40+
41+
streams, done := terminal.StreamsForTesting(t)
42+
view := views.NewView(streams)
43+
ui := new(cli.MockUi)
44+
45+
meta := Meta{
46+
testingOverrides: metaOverridesForProvider(provider.Provider),
47+
Ui: ui,
48+
View: view,
49+
Streams: streams,
50+
ProviderSource: providerSource,
51+
}
52+
53+
init := &InitCommand{Meta: meta}
54+
if code := init.Run(nil); code != 0 {
55+
output := done(t)
56+
t.Fatalf("expected status code %d but got %d: %s", 9, code, output.All())
57+
}
58+
59+
c := &TestCommand{Meta: meta}
60+
c.Run([]string{"-json", "-no-color"})
61+
output := done(t).All()
62+
63+
if !strings.Contains(output, "40 passed, 0 failed") {
64+
t.Errorf("output didn't produce the right output:\n\n%s", output)
65+
}
66+
67+
// Split the log into lines
68+
lines := strings.Split(output, "\n")
69+
70+
// Find the start of the teardown and complete timestamps
71+
// The difference is the approximate duration of the test teardown operation.
72+
// This test is running in parallel, so we expect the teardown to also run in parallel.
73+
// We sleep for 3 seconds in the test teardown to simulate a long-running destroy.
74+
// There are 6 unique state keys in the parallel test, so we expect the teardown to take less than 3*6 (18) seconds.
75+
var startTimestamp, completeTimestamp string
76+
for _, line := range lines {
77+
if strings.Contains(line, `{"path":"parallel.tftest.hcl","progress":"teardown"`) {
78+
var obj map[string]interface{}
79+
if err := json.Unmarshal([]byte(line), &obj); err == nil {
80+
if ts, ok := obj["@timestamp"].(string); ok {
81+
startTimestamp = ts
82+
}
83+
}
84+
} else if strings.Contains(line, `{"path":"parallel.tftest.hcl","progress":"complete"`) {
85+
var obj map[string]interface{}
86+
if err := json.Unmarshal([]byte(line), &obj); err == nil {
87+
if ts, ok := obj["@timestamp"].(string); ok {
88+
completeTimestamp = ts
89+
}
90+
}
91+
}
92+
}
93+
94+
if startTimestamp == "" || completeTimestamp == "" {
95+
t.Fatalf("could not find start or complete timestamp in log output")
96+
}
97+
98+
startTime, err := time.Parse(time.RFC3339Nano, startTimestamp)
99+
if err != nil {
100+
t.Fatalf("failed to parse start timestamp: %v", err)
101+
}
102+
completeTime, err := time.Parse(time.RFC3339Nano, completeTimestamp)
103+
if err != nil {
104+
t.Fatalf("failed to parse complete timestamp: %v", err)
105+
}
106+
dur := completeTime.Sub(startTime)
107+
if dur > 10*time.Second {
108+
t.Fatalf("parallel.tftest.hcl duration took too long: %0.2f seconds", dur.Seconds())
109+
}
110+
}

internal/command/test_test.go

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -581,6 +581,7 @@ main.tftest.hcl/single, and they need to be cleaned up manually:
581581
t.Errorf("expected err to be %s\n\nbut got %s\n\n diff:%s\n", cleanupErr, err, diff)
582582
}
583583
if diff := cmp.Diff(cleanupMessage, output.Stdout()); diff != "" {
584+
fmt.Printf("expected output to be %s\n\nbut got %s\n\n diff:%s\n", cleanupMessage, output.Stdout(), diff)
584585
t.Errorf("expected output to be %s\n\nbut got %s\n\n diff:%s\n", cleanupMessage, output.Stdout(), diff)
585586
}
586587

internal/command/testdata/test/parallel/main.tf

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ variable "input" {
55

66
resource "test_resource" "foo" {
77
value = var.input
8+
destroy_wait_seconds = 3
89
}
910

1011
output "value" {

internal/command/testdata/test/parallel/parallel.tftest.hcl

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -515,10 +515,3 @@ run "test_30" {
515515
error_message = "error in test_30"
516516
}
517517
}
518-
519-
// Expected order:
520-
// - run [setup]
521-
// - run [test_a, test_d]
522-
// - run [test_b]
523-
// - run [test_c]
524-

internal/command/testing/test_provider.go

Lines changed: 24 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -133,6 +133,7 @@ func NewProvider(store *ResourceStore) *TestProvider {
133133
provider.Provider.CallFunctionFn = provider.CallFunction
134134
provider.Provider.OpenEphemeralResourceFn = provider.OpenEphemeralResource
135135
provider.Provider.CloseEphemeralResourceFn = provider.CloseEphemeralResource
136+
provider.Provider.NoLock = store.Nolock
136137

137138
return provider
138139
}
@@ -192,8 +193,7 @@ func (provider *TestProvider) DataSourceCount() int {
192193
}
193194

194195
func (provider *TestProvider) count(prefix string) int {
195-
provider.Store.mutex.RLock()
196-
defer provider.Store.mutex.RUnlock()
196+
defer provider.Store.beginRead()()
197197

198198
if len(prefix) == 0 {
199199
return len(provider.Store.Data)
@@ -209,8 +209,7 @@ func (provider *TestProvider) count(prefix string) int {
209209
}
210210

211211
func (provider *TestProvider) string(prefix string) string {
212-
provider.Store.mutex.RLock()
213-
defer provider.Store.mutex.RUnlock()
212+
defer provider.Store.beginRead()()
214213

215214
var keys []string
216215
for key := range provider.Store.Data {
@@ -393,14 +392,14 @@ func (provider *TestProvider) CloseEphemeralResource(providers.CloseEphemeralRes
393392
// For example, when the test provider gets a ReadResource request it will search
394393
// the store for a resource with a matching ID. See (*TestProvider).ReadResource.
395394
type ResourceStore struct {
	// mutex guards Data. It is bypassed entirely when Nolock is set.
	mutex  sync.RWMutex
	// Nolock disables all locking on the store; intended only for tests
	// that need unblocked parallel access (e.g. timing measurements).
	Nolock bool // nolock is used to disable locking

	// Data holds the stored resource values, keyed by resource ID.
	Data map[string]cty.Value
}
400400

401401
func (store *ResourceStore) Delete(key string) cty.Value {
402-
store.mutex.Lock()
403-
defer store.mutex.Unlock()
402+
defer store.beginWrite()()
404403

405404
if resource, ok := store.Data[key]; ok {
406405
delete(store.Data, key)
@@ -410,15 +409,13 @@ func (store *ResourceStore) Delete(key string) cty.Value {
410409
}
411410

412411
func (store *ResourceStore) Get(key string) cty.Value {
413-
store.mutex.RLock()
414-
defer store.mutex.RUnlock()
412+
defer store.beginRead()()
415413

416414
return store.get(key)
417415
}
418416

419417
func (store *ResourceStore) Put(key string, resource cty.Value) cty.Value {
420-
store.mutex.Lock()
421-
defer store.mutex.Unlock()
418+
defer store.beginWrite()()
422419

423420
old := store.get(key)
424421
store.Data[key] = resource
@@ -431,3 +428,19 @@ func (store *ResourceStore) get(key string) cty.Value {
431428
}
432429
return cty.NilVal
433430
}
431+
432+
func (store *ResourceStore) beginWrite() func() {
433+
if store.Nolock {
434+
return func() {}
435+
}
436+
store.mutex.Lock()
437+
return store.mutex.Unlock
438+
439+
}
440+
func (store *ResourceStore) beginRead() func() {
441+
if store.Nolock {
442+
return func() {}
443+
}
444+
store.mutex.RLock()
445+
return store.mutex.RUnlock
446+
}

internal/moduletest/graph/node_state_cleanup.go

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -136,11 +136,7 @@ func (n *NodeStateCleanup) destroy(ctx *EvalContext, runNode *NodeTestRun, waite
136136
Overrides: mocking.PackageOverrides(run.Config, file.Config, run.ModuleConfig),
137137
}
138138

139-
tfCtx, ctxDiags := terraform.NewContext(n.opts.ContextOpts)
140-
diags = diags.Append(ctxDiags)
141-
if ctxDiags.HasErrors() {
142-
return state, diags
143-
}
139+
tfCtx, _ := terraform.NewContext(n.opts.ContextOpts)
144140
ctx.Renderer().Run(run, file, moduletest.TearDown, 0)
145141

146142
waiter.update(tfCtx, moduletest.TearDown, nil)

0 commit comments

Comments
 (0)