Skip to content

Commit 09eb94a

Browse files
committed
Reverting the change that placed the stats computation under the CollectDiskStats flag (stats are now always collected)
1 parent 3cb7cf7 commit 09eb94a

7 files changed

Lines changed: 17 additions & 43 deletions

File tree

contentcoder.go

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -110,9 +110,7 @@ func (c *chunkedContentCoder) Close() error {
110110
}
111111

112112
func (c *chunkedContentCoder) incrementBytesWritten(val uint64) {
113-
if CollectDiskStats {
114-
atomic.AddUint64(&c.bytesWritten, val)
115-
}
113+
atomic.AddUint64(&c.bytesWritten, val)
116114
}
117115

118116
func (c *chunkedContentCoder) getBytesWritten() uint64 {

docvalues.go

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -147,9 +147,7 @@ func (di *docValueReader) ResetBytesRead(val uint64) {
147147
}
148148

149149
func (di *docValueReader) incrementBytesRead(val uint64) {
150-
if CollectDiskStats {
151-
atomic.AddUint64(&di.bytesRead, val)
152-
}
150+
atomic.AddUint64(&di.bytesRead, val)
153151
}
154152

155153
func (di *docValueReader) BytesWritten() uint64 {

intDecoder.go

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -59,9 +59,9 @@ func newChunkedIntDecoder(buf []byte, offset uint64, rv *chunkedIntDecoder) *chu
5959
rv.chunkOffsets[i], read = binary.Uvarint(buf[offset+n : offset+n+binary.MaxVarintLen64])
6060
n += uint64(read)
6161
}
62-
if CollectDiskStats {
63-
atomic.AddUint64(&rv.bytesRead, n)
64-
}
62+
63+
atomic.AddUint64(&rv.bytesRead, n)
64+
6565
rv.dataStartOffset = offset + n
6666
return rv
6767
}
@@ -91,9 +91,9 @@ func (d *chunkedIntDecoder) loadChunk(chunk int) error {
9191
start += s
9292
end += e
9393
d.curChunkBytes = d.data[start:end]
94-
if CollectDiskStats {
95-
atomic.AddUint64(&d.bytesRead, uint64(len(d.curChunkBytes)))
96-
}
94+
95+
atomic.AddUint64(&d.bytesRead, uint64(len(d.curChunkBytes)))
96+
9797
if d.r == nil {
9898
d.r = newMemUvarintReader(d.curChunkBytes)
9999
} else {

intcoder.go

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -78,9 +78,7 @@ func (c *chunkedIntCoder) SetChunkSize(chunkSize uint64, maxDocNum uint64) {
7878
}
7979

8080
func (c *chunkedIntCoder) incrementBytesWritten(val uint64) {
81-
if CollectDiskStats {
82-
atomic.AddUint64(&c.bytesWritten, val)
83-
}
81+
atomic.AddUint64(&c.bytesWritten, val)
8482
}
8583

8684
func (c *chunkedIntCoder) getBytesWritten() uint64 {

new.go

Lines changed: 2 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -33,10 +33,6 @@ var NewSegmentBufferNumResultsBump int = 100
3333
var NewSegmentBufferNumResultsFactor float64 = 1.0
3434
var NewSegmentBufferAvgBytesPerDocFactor float64 = 1.0
3535

36-
// This flag controls the disk stats collection from the segment files
37-
// during indexing and querying
38-
var CollectDiskStats bool
39-
4036
// ValidateDocFields can be set by applications to perform additional checks
4137
// on fields in a document being added to a new segment, by default it does
4238
// nothing.
@@ -498,9 +494,7 @@ func (s *interim) getBytesWritten() uint64 {
498494
}
499495

500496
func (s *interim) incrementBytesWritten(val uint64) {
501-
if CollectDiskStats {
502-
atomic.AddUint64(&s.bytesWritten, val)
503-
}
497+
atomic.AddUint64(&s.bytesWritten, val)
504498
}
505499

506500
func (s *interim) writeStoredFields() (
@@ -617,9 +611,7 @@ func (s *interim) writeStoredFields() (
617611
}
618612

619613
func (s *interim) setBytesWritten(val uint64) {
620-
if CollectDiskStats {
621-
atomic.StoreUint64(&s.bytesWritten, val)
622-
}
614+
atomic.StoreUint64(&s.bytesWritten, val)
623615
}
624616

625617
func (s *interim) writeDicts() (fdvIndexOffset uint64, dictOffsets []uint64, err error) {

posting.go

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -263,9 +263,7 @@ func (p *PostingsList) BytesRead() uint64 {
263263
}
264264

265265
func (p *PostingsList) incrementBytesRead(val uint64) {
266-
if CollectDiskStats {
267-
atomic.AddUint64(&p.bytesRead, val)
268-
}
266+
atomic.AddUint64(&p.bytesRead, val)
269267
}
270268

271269
func (p *PostingsList) BytesWritten() uint64 {
@@ -378,9 +376,7 @@ func (i *PostingsIterator) BytesRead() uint64 {
378376
}
379377

380378
func (i *PostingsIterator) incrementBytesRead(val uint64) {
381-
if CollectDiskStats {
382-
atomic.AddUint64(&i.bytesRead, val)
383-
}
379+
atomic.AddUint64(&i.bytesRead, val)
384380
}
385381

386382
func (i *PostingsIterator) BytesWritten() uint64 {

segment.go

Lines changed: 4 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -228,9 +228,7 @@ func (s *Segment) loadConfig() error {
228228
// read from the on-disk segment as part of the current
229229
// query.
230230
func (s *Segment) ResetBytesRead(val uint64) {
231-
if CollectDiskStats {
232-
atomic.StoreUint64(&s.SegmentBase.bytesRead, val)
233-
}
231+
atomic.StoreUint64(&s.SegmentBase.bytesRead, val)
234232
}
235233

236234
func (s *Segment) BytesRead() uint64 {
@@ -243,19 +241,15 @@ func (s *Segment) BytesWritten() uint64 {
243241
}
244242

245243
func (s *Segment) incrementBytesRead(val uint64) {
246-
if CollectDiskStats {
247-
atomic.AddUint64(&s.bytesRead, val)
248-
}
244+
atomic.AddUint64(&s.bytesRead, val)
249245
}
250246

251247
func (s *SegmentBase) BytesWritten() uint64 {
252248
return atomic.LoadUint64(&s.bytesWritten)
253249
}
254250

255251
func (s *SegmentBase) setBytesWritten(val uint64) {
256-
if CollectDiskStats {
257-
atomic.AddUint64(&s.bytesWritten, val)
258-
}
252+
atomic.AddUint64(&s.bytesWritten, val)
259253
}
260254

261255
func (s *SegmentBase) BytesRead() uint64 {
@@ -265,9 +259,7 @@ func (s *SegmentBase) BytesRead() uint64 {
265259
func (s *SegmentBase) ResetBytesRead(val uint64) {}
266260

267261
func (s *SegmentBase) incrementBytesRead(val uint64) {
268-
if CollectDiskStats {
269-
atomic.AddUint64(&s.bytesRead, val)
270-
}
262+
atomic.AddUint64(&s.bytesRead, val)
271263
}
272264

273265
func (s *SegmentBase) loadFields() error {

Commit comments: 0

Comments (0)