
Commit 771fd4c

Fix up walreceiver

Signed-off-by: Felix Yuan <[email protected]>

1 parent a55ffe8 · commit 771fd4c

2 files changed, +30 -55 lines

collector/pg_stat_walreceiver.go (+13 -36)

@@ -37,64 +37,65 @@ func NewPGStatWalReceiverCollector(config collectorConfig) (Collector, error) {
 }
 
 var (
+	labelCats = []string{"upstream_host", "slot_name", "status"}
 	statWalReceiverStatus = prometheus.NewDesc(
 		prometheus.BuildFQName(namespace, statWalReceiverSubsystem, "status"),
 		"Activity status of the WAL receiver process",
-		[]string{"upstream_host", "slot_name"},
+		labelCats,
 		prometheus.Labels{},
 	)
 	statWalReceiverReceiveStartLsn = prometheus.NewDesc(
 		prometheus.BuildFQName(namespace, statWalReceiverSubsystem, "receive_start_lsn"),
 		"First write-ahead log location used when WAL receiver is started represented as a decimal",
-		[]string{"upstream_host", "slot_name"},
+		labelCats,
 		prometheus.Labels{},
 	)
 	statWalReceiverReceiveStartTli = prometheus.NewDesc(
 		prometheus.BuildFQName(namespace, statWalReceiverSubsystem, "receive_start_tli"),
 		"First timeline number used when WAL receiver is started",
-		[]string{"upstream_host", "slot_name"},
+		labelCats,
 		prometheus.Labels{},
 	)
 	statWalReceiverFlushedLSN = prometheus.NewDesc(
 		prometheus.BuildFQName(namespace, statWalReceiverSubsystem, "flushed_lsn"),
 		"Last write-ahead log location already received and flushed to disk, the initial value of this field being the first log location used when WAL receiver is started represented as a decimal",
-		[]string{"upstream_host", "slot_name"},
+		labelCats,
 		prometheus.Labels{},
 	)
 	statWalReceiverReceivedTli = prometheus.NewDesc(
 		prometheus.BuildFQName(namespace, statWalReceiverSubsystem, "received_tli"),
 		"Timeline number of last write-ahead log location received and flushed to disk",
-		[]string{"upstream_host", "slot_name"},
+		labelCats,
 		prometheus.Labels{},
 	)
 	statWalReceiverLastMsgSendTime = prometheus.NewDesc(
 		prometheus.BuildFQName(namespace, statWalReceiverSubsystem, "last_msg_send_time"),
 		"Send time of last message received from origin WAL sender",
-		[]string{"upstream_host", "slot_name"},
+		labelCats,
 		prometheus.Labels{},
 	)
 	statWalReceiverLastMsgReceiptTime = prometheus.NewDesc(
 		prometheus.BuildFQName(namespace, statWalReceiverSubsystem, "last_msg_receipt_time"),
 		"Send time of last message received from origin WAL sender",
-		[]string{"upstream_host", "slot_name"},
+		labelCats,
 		prometheus.Labels{},
 	)
 	statWalReceiverLatestEndLsn = prometheus.NewDesc(
 		prometheus.BuildFQName(namespace, statWalReceiverSubsystem, "latest_end_lsn"),
 		"Last write-ahead log location reported to origin WAL sender as integer",
-		[]string{"upstream_host", "slot_name"},
+		labelCats,
 		prometheus.Labels{},
 	)
 	statWalReceiverLatestEndTime = prometheus.NewDesc(
 		prometheus.BuildFQName(namespace, statWalReceiverSubsystem, "latest_end_time"),
 		"Time of last write-ahead log location reported to origin WAL sender",
-		[]string{"upstream_host", "slot_name"},
+		labelCats,
 		prometheus.Labels{},
 	)
 	statWalReceiverUpstreamNode = prometheus.NewDesc(
 		prometheus.BuildFQName(namespace, statWalReceiverSubsystem, "upstream_node"),
 		"Node ID of the upstream node",
-		[]string{"upstream_host", "slot_name"},
+		labelCats,
 		prometheus.Labels{},
 	)

@@ -167,29 +168,12 @@ func (c *PGStatWalReceiverCollector) Update(ctx context.Context, instance *insta
 			level.Debug(c.log).Log("msg", "Skipping wal receiver stats because slotname host is null")
 			continue
 		}
-		labels := []string{upstreamHost.String, slotName.String}
+
 		if !status.Valid {
 			level.Debug(c.log).Log("msg", "Skipping wal receiver stats because status is null")
 			continue
 		}
-
-		var statusMetric float64
-		switch status.String {
-		case "stopped":
-			statusMetric = 0.0
-		case "starting":
-			statusMetric = 1.0
-		case "streaming":
-			statusMetric = 2.0
-		case "waiting":
-			statusMetric = 3.0
-		case "restarting":
-			statusMetric = 4.0
-		case "stopping":
-			statusMetric = -1.0
-		default:
-			statusMetric = -2.0
-		}
+		labels := []string{upstreamHost.String, slotName.String, status.String}
 
 		if !receiveStartLsn.Valid {
 			level.Debug(c.log).Log("msg", "Skipping wal receiver stats because receive_start_lsn is null")

@@ -227,14 +211,7 @@ func (c *PGStatWalReceiverCollector) Update(ctx context.Context, instance *insta
 			level.Debug(c.log).Log("msg", "Skipping wal receiver stats because upstream_node is null")
 			continue
 		}
-
 		receiveStartLsnMetric := float64(receiveStartLsn.Int64)
-		ch <- prometheus.MustNewConstMetric(
-			statWalReceiverStatus,
-			prometheus.GaugeValue,
-			statusMetric,
-			labels...)
-
 		ch <- prometheus.MustNewConstMetric(
 			statWalReceiverReceiveStartLsn,
 			prometheus.CounterValue,
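In effect, the status text becomes a third label value on every walreceiver metric instead of being mapped to a numeric gauge. A minimal standalone sketch of that pattern with client_golang (the "pg"/"stat_wal_receiver" strings and sample values below are illustrative stand-ins, not the exporter's actual constants):

package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
	dto "github.com/prometheus/client_model/go"
)

func main() {
	// Shared label set from this change: "status" joins the identifying labels.
	labelCats := []string{"upstream_host", "slot_name", "status"}

	// Illustrative descriptor; namespace/subsystem here are stand-ins.
	desc := prometheus.NewDesc(
		prometheus.BuildFQName("pg", "stat_wal_receiver", "receive_start_lsn"),
		"First write-ahead log location used when WAL receiver is started represented as a decimal",
		labelCats,
		prometheus.Labels{},
	)

	// The status string ("streaming", "stopping", ...) rides along as the
	// third label value; no separate numeric status gauge is emitted.
	m := prometheus.MustNewConstMetric(
		desc, prometheus.CounterValue, 1200668684563608, "foo", "bar", "streaming")

	var out dto.Metric
	if err := m.Write(&out); err != nil {
		panic(err)
	}
	fmt.Println(out.String())
}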

collector/pg_stat_walreceiver_test.go (+17 -19)

@@ -87,16 +87,15 @@ func TestPGStatWalReceiverCollectorWithFlushedLSN(t *testing.T) {
 		}
 	}()
 	expected := []MetricResult{
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: -1.0, metricType: dto.MetricType_GAUGE},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1200668684563608, metricType: dto.MetricType_COUNTER},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1687321285, metricType: dto.MetricType_GAUGE},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1200668684563609, metricType: dto.MetricType_COUNTER},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1687321280, metricType: dto.MetricType_GAUGE},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1687321275, metricType: dto.MetricType_COUNTER},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1687321276, metricType: dto.MetricType_COUNTER},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1200668684563610, metricType: dto.MetricType_COUNTER},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1687321277, metricType: dto.MetricType_COUNTER},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 5, metricType: dto.MetricType_GAUGE},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "stopping"}, value: 1200668684563608, metricType: dto.MetricType_COUNTER},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "stopping"}, value: 1687321285, metricType: dto.MetricType_GAUGE},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "stopping"}, value: 1200668684563609, metricType: dto.MetricType_COUNTER},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "stopping"}, value: 1687321280, metricType: dto.MetricType_GAUGE},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "stopping"}, value: 1687321275, metricType: dto.MetricType_COUNTER},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "stopping"}, value: 1687321276, metricType: dto.MetricType_COUNTER},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "stopping"}, value: 1200668684563610, metricType: dto.MetricType_COUNTER},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "stopping"}, value: 1687321277, metricType: dto.MetricType_COUNTER},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "stopping"}, value: 5, metricType: dto.MetricType_GAUGE},
 	}
 	convey.Convey("Metrics comparison", t, func() {
 		for _, expect := range expected {

@@ -165,15 +164,14 @@ func TestPGStatWalReceiverCollectorWithNoFlushedLSN(t *testing.T) {
 		}
 	}()
 	expected := []MetricResult{
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1.0, metricType: dto.MetricType_GAUGE},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1200668684563608, metricType: dto.MetricType_COUNTER},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1687321285, metricType: dto.MetricType_GAUGE},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1687321280, metricType: dto.MetricType_GAUGE},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1687321275, metricType: dto.MetricType_COUNTER},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1687321276, metricType: dto.MetricType_COUNTER},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1200668684563610, metricType: dto.MetricType_COUNTER},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 1687321277, metricType: dto.MetricType_COUNTER},
-		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar"}, value: 5, metricType: dto.MetricType_GAUGE},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "starting"}, value: 1200668684563608, metricType: dto.MetricType_COUNTER},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "starting"}, value: 1687321285, metricType: dto.MetricType_GAUGE},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "starting"}, value: 1687321280, metricType: dto.MetricType_GAUGE},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "starting"}, value: 1687321275, metricType: dto.MetricType_COUNTER},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "starting"}, value: 1687321276, metricType: dto.MetricType_COUNTER},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "starting"}, value: 1200668684563610, metricType: dto.MetricType_COUNTER},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "starting"}, value: 1687321277, metricType: dto.MetricType_COUNTER},
+		{labels: labelMap{"upstream_host": "foo", "slot_name": "bar", "status": "starting"}, value: 5, metricType: dto.MetricType_GAUGE},
 	}
 	convey.Convey("Metrics comparison", t, func() {
 		for _, expect := range expected {
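For a sense of how the relabelled series look when scraped, here is a minimal sketch using client_golang's testutil helpers rather than this repo's labelMap/MetricResult fixtures (the metric name and values are hypothetical stand-ins): the status now appears as a status="..." label on each series, and the separate status gauge is gone.

package main

import (
	"fmt"
	"strings"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/testutil"
)

func main() {
	// Hypothetical gauge carrying the same three labels the tests now expect.
	upstreamNode := prometheus.NewGaugeVec(prometheus.GaugeOpts{
		Name: "pg_stat_wal_receiver_upstream_node",
		Help: "Node ID of the upstream node",
	}, []string{"upstream_host", "slot_name", "status"})
	upstreamNode.WithLabelValues("foo", "bar", "stopping").Set(5)

	// The expected exposition carries status="stopping" on the series itself.
	expected := `# HELP pg_stat_wal_receiver_upstream_node Node ID of the upstream node
# TYPE pg_stat_wal_receiver_upstream_node gauge
pg_stat_wal_receiver_upstream_node{slot_name="bar",status="stopping",upstream_host="foo"} 5
`
	if err := testutil.CollectAndCompare(upstreamNode, strings.NewReader(expected)); err != nil {
		fmt.Println("mismatch:", err)
		return
	}
	fmt.Println("series match")
}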
