6
6
_ "net/http/pprof"
7
7
"os"
8
8
"path/filepath"
9
+ "sort"
9
10
"testing"
10
11
"time"
11
12
@@ -14,6 +15,7 @@ import (
14
15
"github.com/prometheus/common/model"
15
16
"github.com/prometheus/prometheus/storage"
16
17
"github.com/prometheus/prometheus/tsdb"
18
+ "github.com/samber/lo"
17
19
"github.com/stretchr/testify/assert"
18
20
"github.com/stretchr/testify/require"
19
21
@@ -23,6 +25,7 @@ import (
23
25
"github.com/grafana/pyroscope/pkg/objstore/client"
24
26
"github.com/grafana/pyroscope/pkg/objstore/providers/filesystem"
25
27
"github.com/grafana/pyroscope/pkg/phlaredb/block"
28
+ "github.com/grafana/pyroscope/pkg/phlaredb/sharding"
26
29
"github.com/grafana/pyroscope/pkg/phlaredb/tsdb/index"
27
30
"github.com/grafana/pyroscope/pkg/pprof/testhelper"
28
31
)
@@ -85,6 +88,153 @@ func TestCompact(t *testing.T) {
85
88
require .Equal (t , expected .String (), res .String ())
86
89
}
87
90
91
// TestCompactWithSplitting compacts four block readers (two distinct blocks,
// each passed twice) into 16 shards and verifies that the output blocks are
// deduplicated, sharded deterministically, and still queryable end to end.
func TestCompactWithSplitting(t *testing.T) {
	ctx := context.Background()

	// b1 holds series {job="a"} (1s-10s) and {job="b"} (11s-20s).
	b1 := newBlock(t, func() []*testhelper.ProfileBuilder {
		return append(
			profileSeriesGenerator(t, time.Unix(1, 0), time.Unix(10, 0), time.Second, "job", "a"),
			profileSeriesGenerator(t, time.Unix(11, 0), time.Unix(20, 0), time.Second, "job", "b")...,
		)
	})
	// b2 holds {job="c"}, {job="d"} plus copies of b1's {job="a"} and
	// {job="b"} series, so compaction must deduplicate the overlap.
	b2 := newBlock(t, func() []*testhelper.ProfileBuilder {
		return append(
			append(
				append(
					profileSeriesGenerator(t, time.Unix(1, 0), time.Unix(10, 0), time.Second, "job", "c"),
					profileSeriesGenerator(t, time.Unix(11, 0), time.Unix(20, 0), time.Second, "job", "d")...,
				), profileSeriesGenerator(t, time.Unix(1, 0), time.Unix(10, 0), time.Second, "job", "a")...,
			),
			profileSeriesGenerator(t, time.Unix(11, 0), time.Unix(20, 0), time.Second, "job", "b")...,
		)
	})
	dst := t.TempDir()
	// Each block is passed twice on purpose: identical inputs must also be
	// deduplicated by the compactor.
	compacted, err := CompactWithSplitting(ctx, []BlockReader{b1, b2, b2, b1}, 16, dst)
	require.NoError(t, err)

	// 4 shards one per series.
	require.Equal(t, 4, len(compacted))
	require.Equal(t, "1_of_16", compacted[0].Labels[sharding.CompactorShardIDLabel])
	require.Equal(t, "6_of_16", compacted[1].Labels[sharding.CompactorShardIDLabel])
	require.Equal(t, "7_of_16", compacted[2].Labels[sharding.CompactorShardIDLabel])
	require.Equal(t, "14_of_16", compacted[3].Labels[sharding.CompactorShardIDLabel])

	// The series b should span from 11 to 20 and not 1 to 20.
	require.Equal(t, model.TimeFromUnix(11), compacted[1].MinTime)
	require.Equal(t, model.TimeFromUnix(20), compacted[1].MaxTime)

	// We first verify we have all series and timestamps across querying all blocks.
	queriers := make(Queriers, len(compacted))
	for i, blk := range compacted {
		queriers[i] = blockQuerierFromMeta(t, dst, blk)
	}

	err = queriers.Open(context.Background())
	require.NoError(t, err)
	matchAll := &ingesterv1.SelectProfilesRequest{
		LabelSelector: "{}",
		Type:          mustParseProfileSelector(t, "process_cpu:cpu:nanoseconds:cpu:nanoseconds"),
		Start:         0,
		End:           40000,
	}
	it, err := queriers.SelectMatchingProfiles(context.Background(), matchAll)
	require.NoError(t, err)

	// Accumulate (labels, timestamps) per fingerprint; any duplicate profile
	// surviving compaction would show up here as an extra timestamp.
	seriesMap := make(map[model.Fingerprint]lo.Tuple2[phlaremodel.Labels, []model.Time])
	for it.Next() {
		r := it.At()
		seriesMap[r.Fingerprint()] = lo.T2(r.Labels().WithoutPrivateLabels(), append(seriesMap[r.Fingerprint()].B, r.Timestamp()))
	}
	require.NoError(t, it.Err())
	require.NoError(t, it.Close())
	series := lo.Values(seriesMap)
	// Map iteration order is random; sort by labels for a stable comparison.
	sort.Slice(series, func(i, j int) bool {
		return phlaremodel.CompareLabelPairs(series[i].A, series[j].A) < 0
	})
	require.Equal(t, []lo.Tuple2[phlaremodel.Labels, []model.Time]{
		lo.T2(phlaremodel.LabelsFromStrings("job", "a"),
			generateTimes(t, model.TimeFromUnix(1), model.TimeFromUnix(10)),
		),
		lo.T2(phlaremodel.LabelsFromStrings("job", "b"),
			generateTimes(t, model.TimeFromUnix(11), model.TimeFromUnix(20)),
		),
		lo.T2(phlaremodel.LabelsFromStrings("job", "c"),
			generateTimes(t, model.TimeFromUnix(1), model.TimeFromUnix(10)),
		),
		lo.T2(phlaremodel.LabelsFromStrings("job", "d"),
			generateTimes(t, model.TimeFromUnix(11), model.TimeFromUnix(20)),
		),
	}, series)

	// Then we query 2 different shards and verify we have a subset of series.
	it, err = queriers[0].SelectMatchingProfiles(ctx, matchAll)
	require.NoError(t, err)
	seriesResult, err := queriers[0].MergeByLabels(context.Background(), it, "job")
	require.NoError(t, err)
	require.Equal(t,
		[]*typesv1.Series{
			{
				Labels: phlaremodel.LabelsFromStrings("job", "a"),
				Points: generatePoints(t, model.TimeFromUnix(1), model.TimeFromUnix(10)),
			},
		}, seriesResult)

	it, err = queriers[1].SelectMatchingProfiles(ctx, matchAll)
	require.NoError(t, err)
	seriesResult, err = queriers[1].MergeByLabels(context.Background(), it, "job")
	require.NoError(t, err)
	require.Equal(t,
		[]*typesv1.Series{
			{
				Labels: phlaremodel.LabelsFromStrings("job", "b"),
				Points: generatePoints(t, model.TimeFromUnix(11), model.TimeFromUnix(20)),
			},
		}, seriesResult)

	// Finally test some stacktraces resolution.
	it, err = queriers[1].SelectMatchingProfiles(ctx, matchAll)
	require.NoError(t, err)
	res, err := queriers[1].MergeByStacktraces(ctx, it)
	require.NoError(t, err)

	// Every generated profile carries one foo>bar>baz sample of value 1 and
	// shard 1 holds the 10 profiles of series "b", hence a total of 10.
	expected := new(phlaremodel.Tree)
	expected.InsertStack(10, "baz", "bar", "foo")
	require.Equal(t, expected.String(), res.String())
}
204
+
205
+ // nolint:unparam
206
+ func profileSeriesGenerator (t * testing.T , from , through time.Time , interval time.Duration , lbls ... string ) []* testhelper.ProfileBuilder {
207
+ t .Helper ()
208
+ var builders []* testhelper.ProfileBuilder
209
+ for ts := from ; ts .Before (through ) || ts .Equal (through ); ts = ts .Add (interval ) {
210
+ builders = append (builders ,
211
+ testhelper .NewProfileBuilder (ts .UnixNano ()).
212
+ CPUProfile ().
213
+ WithLabels (
214
+ lbls ... ,
215
+ ).ForStacktraceString ("foo" , "bar" , "baz" ).AddSamples (1 ))
216
+ }
217
+ return builders
218
+ }
219
+
220
+ func generatePoints (t * testing.T , from , through model.Time ) []* typesv1.Point {
221
+ t .Helper ()
222
+ var points []* typesv1.Point
223
+ for ts := from ; ts .Before (through ) || ts .Equal (through ); ts = ts .Add (time .Second ) {
224
+ points = append (points , & typesv1.Point {Timestamp : int64 (ts ), Value : 1 })
225
+ }
226
+ return points
227
+ }
228
+
229
+ func generateTimes (t * testing.T , from , through model.Time ) []model.Time {
230
+ t .Helper ()
231
+ var times []model.Time
232
+ for ts := from ; ts .Before (through ) || ts .Equal (through ); ts = ts .Add (time .Second ) {
233
+ times = append (times , ts )
234
+ }
235
+ return times
236
+ }
237
+
88
238
func TestProfileRowIterator (t * testing.T ) {
89
239
b := newBlock (t , func () []* testhelper.ProfileBuilder {
90
240
return []* testhelper.ProfileBuilder {
@@ -268,28 +418,22 @@ func TestSeriesRewriter(t *testing.T) {
268
418
})
269
419
rows , err := newProfileRowIterator (blk )
270
420
require .NoError (t , err )
271
- filePath := filepath .Join (t .TempDir (), block .IndexFilename )
272
- idxw , err := prepareIndexWriter (context .Background (), filePath , []BlockReader {blk })
273
- require .NoError (t , err )
274
- it := newSeriesRewriter (rows , idxw )
275
- // tests that all rows are written to the correct series index
276
- require .True (t , it .Next ())
277
- require .Equal (t , uint32 (0 ), it .At ().row .SeriesIndex ())
278
- require .True (t , it .Next ())
279
- require .Equal (t , uint32 (0 ), it .At ().row .SeriesIndex ())
280
- require .True (t , it .Next ())
281
- require .Equal (t , uint32 (0 ), it .At ().row .SeriesIndex ())
282
- require .True (t , it .Next ())
283
- require .Equal (t , uint32 (1 ), it .At ().row .SeriesIndex ())
284
- require .True (t , it .Next ())
285
- require .Equal (t , uint32 (2 ), it .At ().row .SeriesIndex ())
286
- require .True (t , it .Next ())
287
- require .Equal (t , uint32 (2 ), it .At ().row .SeriesIndex ())
288
- require .False (t , it .Next ())
421
+ path := t .TempDir ()
422
+ filePath := filepath .Join (path , block .IndexFilename )
423
+ idxw := newIndexRewriter (path )
424
+ seriesIdx := []uint32 {}
425
+ for rows .Next () {
426
+ r := rows .At ()
427
+ require .NoError (t , idxw .ReWriteRow (r ))
428
+ seriesIdx = append (seriesIdx , r .row .SeriesIndex ())
429
+ }
430
+ require .NoError (t , rows .Err ())
431
+ require .NoError (t , rows .Close ())
289
432
290
- require .NoError (t , it .Err ())
291
- require .NoError (t , it .Close ())
292
- require .NoError (t , idxw .Close ())
433
+ require .Equal (t , []uint32 {0 , 0 , 0 , 1 , 2 , 2 }, seriesIdx )
434
+
435
+ err = idxw .Close (context .Background ())
436
+ require .NoError (t , err )
293
437
294
438
idxr , err := index .NewFileReader (filePath )
295
439
require .NoError (t , err )
0 commit comments