@@ -1,17 +1,18 @@
-import numpy as np
-import matplotlib.pyplot as plt
 import random
-
 from collections import defaultdict
 
+import matplotlib.pyplot as plt
+import numpy as np
 from opentelemetry import metrics
 from opentelemetry.sdk.metrics import Counter, MeterProvider
 from opentelemetry.sdk.metrics.export.aggregate import SumAggregator
 from opentelemetry.sdk.metrics.export.controller import PushController
-from opentelemetry.sdk.metrics.export.in_memory_metrics_exporter import InMemoryMetricsExporter
+from opentelemetry.sdk.metrics.export.in_memory_metrics_exporter import (
+    InMemoryMetricsExporter,
+)
 from opentelemetry.sdk.metrics.view import View, ViewConfig
 
-## set up opentelemetry
+# set up opentelemetry
 
 # Sets the global MeterProvider instance
 metrics.set_meter_provider(MeterProvider())
@@ -46,7 +47,8 @@
 
 meter.register_view(counter_view)
 
-## generate the random metric data
+# generate the random metric data
+
 
 def unknown_customer_calls():
     """Generate customer call data to our application"""
@@ -57,23 +59,49 @@ def unknown_customer_calls():
     random.seed(1)
 
     # customer 123 is a big user, and made 1000 requests in this timeframe
-    requests = np.random.normal(1000, 250, 1000)  # 1000 requests with average 1000 bytes, std dev 250
+    requests = np.random.normal(
+        1000, 250, 1000
+    )  # 1000 requests with average 1000 bytes, std dev 250
 
     for request in requests:
-        bytes_counter.add(int(request), {"environment": "production", "method": "REST", "customer_id": 123})
+        bytes_counter.add(
+            int(request),
+            {
+                "environment": "production",
+                "method": "REST",
+                "customer_id": 123,
+            },
+        )
 
     # customer 247 is another big user, making fewer, but bigger requests
-    requests = np.random.normal(5000, 1250, 200)  # 200 requests with average size of 5k bytes
+    requests = np.random.normal(
+        5000, 1250, 200
+    )  # 200 requests with average size of 5k bytes
 
     for request in requests:
-        bytes_counter.add(int(request), {"environment": "production", "method": "REST", "customer_id": 247})
+        bytes_counter.add(
+            int(request),
+            {
+                "environment": "production",
+                "method": "REST",
+                "customer_id": 247,
+            },
+        )
 
     # There are many other smaller customers
     for customer_id in range(250):
         requests = np.random.normal(1000, 250, np.random.randint(1, 10))
         method = "REST" if np.random.randint(2) else "gRPC"
         for request in requests:
-            bytes_counter.add(int(request), {"environment": "production", "method": method, "customer_id": customer_id})
+            bytes_counter.add(
+                int(request),
+                {
+                    "environment": "production",
+                    "method": method,
+                    "customer_id": customer_id,
+                },
+            )
+
 
 unknown_customer_calls()
 
@@ -93,10 +121,15 @@ def unknown_customer_calls():
     customer_bytes_map[exemplar.dropped_labels] += exemplar.value
 
 
-customer_bytes_list = sorted(list(customer_bytes_map.items()), key=lambda t: t[1], reverse=True)
+customer_bytes_list = sorted(
+    list(customer_bytes_map.items()), key=lambda t: t[1], reverse=True
+)
 
 # Save our top 5 customers and sum all of the rest into "Others".
-top_5_customers = [("Customer {}".format(dict(val[0])["customer_id"]), val[1]) for val in customer_bytes_list[:5]] + [("Other Customers", sum([val[1] for val in customer_bytes_list[5:]]))]
+top_5_customers = [
+    ("Customer {}".format(dict(val[0])["customer_id"]), val[1])
+    for val in customer_bytes_list[:5]
+] + [("Other Customers", sum([val[1] for val in customer_bytes_list[5:]]))]
 
 # unzip the data into X (sizes of each customer's contribution) and labels
 labels, X = zip(*top_5_customers)
@@ -106,26 +139,45 @@ def unknown_customer_calls():
 plt.show()
 
 # Estimate how many bytes customer 123 sent
-customer_123_bytes = customer_bytes_map[(("customer_id", 123), ("method", "REST"))]
+customer_123_bytes = customer_bytes_map[
+    (("customer_id", 123), ("method", "REST"))
+]
 
 # Since the exemplars were randomly sampled, all sample_counts will be the same
 sample_count = exemplars[0].sample_count
 print("sample count", sample_count, "customer", customer_123_bytes)
 full_customer_123_bytes = sample_count * customer_123_bytes
 
 # With seed == 1 we get 1008612 - quite close to the statistical mean of 1000000! (more exemplars would make this estimation even more accurate)
-print("Customer 123 sent about {} bytes this interval".format(int(full_customer_123_bytes)))
+print(
+    "Customer 123 sent about {} bytes this interval".format(
+        int(full_customer_123_bytes)
+    )
+)
 
 # Determine the top 25 customers by how many bytes they sent in exemplars
 top_25_customers = customer_bytes_list[:25]
 
 # out of those 25 customers, determine how many used grpc, and come up with a ratio
-percent_grpc = len(list(filter(lambda customer_value: customer_value[0][1][1] == "gRPC", top_25_customers))) / len(top_25_customers)
-
-print("~{}% of the top 25 customers (by bytes in) used gRPC this interval".format(int(percent_grpc * 100)))
+percent_grpc = len(
+    list(
+        filter(
+            lambda customer_value: customer_value[0][1][1] == "gRPC",
+            top_25_customers,
+        )
+    )
+) / len(top_25_customers)
+
+print(
+    "~{}% of the top 25 customers (by bytes in) used gRPC this interval".format(
+        int(percent_grpc * 100)
+    )
+)
 
 # Determine the 50th, 90th, and 99th percentile of byte size sent in
-quantiles = np.quantile([exemplar.value for exemplar in exemplars], [0.5, 0.9, 0.99])
+quantiles = np.quantile(
+    [exemplar.value for exemplar in exemplars], [0.5, 0.9, 0.99]
+)
 print("50th Percentile Bytes In:", int(quantiles[0]))
 print("90th Percentile Bytes In:", int(quantiles[1]))
 print("99th Percentile Bytes In:", int(quantiles[2]))