Merged (changes from 1 commit)
16 changes: 5 additions & 11 deletions dataRetriever/dataPool/proofsCache/export_test.go
@@ -7,22 +7,16 @@ func NewProofsCache(bucketSize int) *proofsCache {
return newProofsCache(bucketSize)
}

// HeadBucketSize -
func (pc *proofsCache) HeadBucketSize() int {
if len(pc.proofsByNonceBuckets) > 0 {
return len(pc.proofsByNonceBuckets[0].proofsByNonce)
}

return 0
}

// HeadBucketSize -
func (pc *proofsCache) FullProofsByNonceSize() int {
size := 0

for _, bucket := range pc.proofsByNonceBuckets {
pc.proofsByNonceBuckets.Range(func(key, value interface{}) bool {
bucket := value.(*proofNonceBucket)
size += bucket.size()
}

return true
})

return size
}
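
Since the unified view above interleaves the removed slice loop with the added sync.Map iteration, a minimal sketch of the test helper as it should read after the change (reconstructed from the added lines; doc comment reworded here):

// FullProofsByNonceSize returns the total number of proofs held across all nonce buckets.
func (pc *proofsCache) FullProofsByNonceSize() int {
	size := 0

	pc.proofsByNonceBuckets.Range(func(key, value interface{}) bool {
		// each value stored in the sync.Map is a *proofNonceBucket
		bucket := value.(*proofNonceBucket)
		size += bucket.size()

		return true
	})

	return size
}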
17 changes: 4 additions & 13 deletions dataRetriever/dataPool/proofsCache/proofsBucket.go
@@ -4,30 +4,21 @@ import "github.com/multiversx/mx-chain-core-go/data"

type proofNonceBucket struct {
maxNonce uint64
proofsByNonce []*proofNonceMapping
bucketSize int
proofsByNonce map[uint64]string
}

func newProofBucket(bucketSize int) *proofNonceBucket {
func newProofBucket() *proofNonceBucket {
return &proofNonceBucket{
proofsByNonce: make([]*proofNonceMapping, 0),
bucketSize: bucketSize,
proofsByNonce: make(map[uint64]string),
}
}

func (p *proofNonceBucket) size() int {
return len(p.proofsByNonce)
}

func (p *proofNonceBucket) isFull() bool {
return len(p.proofsByNonce) >= p.bucketSize
}

func (p *proofNonceBucket) insert(proof data.HeaderProofHandler) {
p.proofsByNonce = append(p.proofsByNonce, &proofNonceMapping{
headerHash: string(proof.GetHeaderHash()),
nonce: proof.GetHeaderNonce(),
})
p.proofsByNonce[proof.GetHeaderNonce()] = string(proof.GetHeaderHash())

if proof.GetHeaderNonce() > p.maxNonce {
p.maxNonce = proof.GetHeaderNonce()
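
The tail of this hunk is collapsed above, so here is a hedged sketch of the whole bucket type after the change, reconstructed from the added lines: proofs are now keyed directly by nonce, and the bucketSize/isFull bookkeeping moves up into proofsCache.

package proofscache

import "github.com/multiversx/mx-chain-core-go/data"

type proofNonceBucket struct {
	maxNonce      uint64
	proofsByNonce map[uint64]string // nonce -> header hash
}

func newProofBucket() *proofNonceBucket {
	return &proofNonceBucket{
		proofsByNonce: make(map[uint64]string),
	}
}

func (p *proofNonceBucket) size() int {
	return len(p.proofsByNonce)
}

func (p *proofNonceBucket) insert(proof data.HeaderProofHandler) {
	p.proofsByNonce[proof.GetHeaderNonce()] = string(proof.GetHeaderHash())

	if proof.GetHeaderNonce() > p.maxNonce {
		p.maxNonce = proof.GetHeaderNonce()
	}
}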
57 changes: 27 additions & 30 deletions dataRetriever/dataPool/proofsCache/proofsCache.go
@@ -14,15 +14,15 @@ type proofNonceMapping struct {

type proofsCache struct {
mutProofsCache sync.RWMutex
proofsByNonceBuckets []*proofNonceBucket
bucketSize int
proofsByNonceBuckets sync.Map
Collaborator: proofNonceMapping struct not used anymore

ssd04 (Contributor, Author), Mar 18, 2025: 👍 deleted it

bucketSize uint64
proofsByHash map[string]data.HeaderProofHandler
}

func newProofsCache(bucketSize int) *proofsCache {
return &proofsCache{
proofsByNonceBuckets: make([]*proofNonceBucket, 0),
bucketSize: bucketSize,
proofsByNonceBuckets: sync.Map{},
bucketSize: uint64(bucketSize),
proofsByHash: make(map[string]data.HeaderProofHandler),
}
}
@@ -52,27 +52,18 @@ func (pc *proofsCache) addProof(proof data.HeaderProofHandler) {
pc.proofsByHash[string(proof.GetHeaderHash())] = proof
}

func (pc *proofsCache) insertProofByNonce(proof data.HeaderProofHandler) {
if len(pc.proofsByNonceBuckets) == 0 {
pc.insertInNewBucket(proof)
return
}

headBucket := pc.proofsByNonceBuckets[0]

if headBucket.isFull() {
pc.insertInNewBucket(proof)
return
}

headBucket.insert(proof)
// getBucketKey will return bucket key as lower bound window value
func (pc *proofsCache) getBucketKey(index uint64) uint64 {
return (index / pc.bucketSize) * pc.bucketSize
}

func (pc *proofsCache) insertInNewBucket(proof data.HeaderProofHandler) {
bucket := newProofBucket(pc.bucketSize)
bucket.insert(proof)
func (pc *proofsCache) insertProofByNonce(proof data.HeaderProofHandler) {
bucketKey := pc.getBucketKey(proof.GetHeaderNonce())

bucket, _ := pc.proofsByNonceBuckets.LoadOrStore(bucketKey, newProofBucket())

pc.proofsByNonceBuckets = append([]*proofNonceBucket{bucket}, pc.proofsByNonceBuckets...)
b := bucket.(*proofNonceBucket)
b.insert(proof)
}
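
For intuition, with the bucketSize of 4 used in the tests below, each bucket covers a fixed nonce window and LoadOrStore lazily creates the bucket for a window on its first proof:

// getBucketKey with bucketSize = 4 (illustrative values):
//   nonces 0..3  -> bucket key 0
//   nonces 4..7  -> bucket key 4
//   nonces 8..11 -> bucket key 8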

func (pc *proofsCache) cleanupProofsBehindNonce(nonce uint64) {
@@ -83,22 +74,28 @@ func (pc *proofsCache) cleanupProofsBehindNonce(nonce uint64) {
pc.mutProofsCache.Lock()
defer pc.mutProofsCache.Unlock()

buckets := make([]*proofNonceBucket, 0)
bucketsToDelete := make([]uint64, 0)

pc.proofsByNonceBuckets.Range(func(key, value interface{}) bool {
bucketKey := key.(uint64)
bucket := value.(*proofNonceBucket)

for _, bucket := range pc.proofsByNonceBuckets {
if nonce > bucket.maxNonce {
pc.cleanupProofsInBucket(bucket)
continue
bucketsToDelete = append(bucketsToDelete, bucketKey)
pc.proofsByNonceBuckets.Delete(key)
}

buckets = append(buckets, bucket)
}
return true
})

pc.proofsByNonceBuckets = buckets
for _, key := range bucketsToDelete {
pc.proofsByNonceBuckets.Delete(key)
Collaborator: this is already done on L86

ssd04 (Contributor, Author): right, old code; removed

}
}

func (pc *proofsCache) cleanupProofsInBucket(bucket *proofNonceBucket) {
for _, proofInfo := range bucket.proofsByNonce {
delete(pc.proofsByHash, proofInfo.headerHash)
for _, headerHash := range bucket.proofsByNonce {
delete(pc.proofsByHash, headerHash)
}
}
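
Per the review exchange above, the separate bucketsToDelete pass is redundant once Delete is called inside Range (sync.Map permits deletion while ranging); a hedged sketch of the cleanup after that follow-up, not the exact committed code, with any lines above the visible hunk omitted:

func (pc *proofsCache) cleanupProofsBehindNonce(nonce uint64) {
	pc.mutProofsCache.Lock()
	defer pc.mutProofsCache.Unlock()

	pc.proofsByNonceBuckets.Range(func(key, value interface{}) bool {
		bucket := value.(*proofNonceBucket)

		// drop a bucket only once every nonce in its window is behind the cleanup nonce
		if nonce > bucket.maxNonce {
			pc.cleanupProofsInBucket(bucket)
			pc.proofsByNonceBuckets.Delete(key)
		}

		return true
	})
}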
37 changes: 14 additions & 23 deletions dataRetriever/dataPool/proofsCache/proofsCache_test.go
@@ -18,80 +18,72 @@ func TestProofsCache(t *testing.T) {
t.Run("incremental nonces, should cleanup all caches", func(t *testing.T) {
t.Parallel()

proof0 := &block.HeaderProof{HeaderHash: []byte{0}, HeaderNonce: 0}
proof1 := &block.HeaderProof{HeaderHash: []byte{1}, HeaderNonce: 1}
proof2 := &block.HeaderProof{HeaderHash: []byte{2}, HeaderNonce: 2}
proof3 := &block.HeaderProof{HeaderHash: []byte{3}, HeaderNonce: 3}
proof4 := &block.HeaderProof{HeaderHash: []byte{4}, HeaderNonce: 4}
proof5 := &block.HeaderProof{HeaderHash: []byte{5}, HeaderNonce: 5}

pc := proofscache.NewProofsCache(4)

pc.AddProof(proof0)
pc.AddProof(proof1)
pc.AddProof(proof2)
pc.AddProof(proof3)
pc.AddProof(proof4)

require.Equal(t, 4, pc.FullProofsByNonceSize())
require.Equal(t, 4, pc.ProofsByHashSize())

pc.AddProof(proof5) // added to new head bucket
pc.AddProof(proof4) // added to new head bucket

require.Equal(t, 1, pc.HeadBucketSize())
require.Equal(t, 5, pc.ProofsByHashSize())

pc.CleanupProofsBehindNonce(5)
require.Equal(t, 1, pc.HeadBucketSize())
pc.CleanupProofsBehindNonce(4)
require.Equal(t, 1, pc.ProofsByHashSize())

pc.CleanupProofsBehindNonce(10)
require.Equal(t, 0, pc.HeadBucketSize())
require.Equal(t, 0, pc.ProofsByHashSize())
})

t.Run("non incremental nonces", func(t *testing.T) {
t.Parallel()

proof0 := &block.HeaderProof{HeaderHash: []byte{0}, HeaderNonce: 0}
proof1 := &block.HeaderProof{HeaderHash: []byte{1}, HeaderNonce: 1}
proof2 := &block.HeaderProof{HeaderHash: []byte{2}, HeaderNonce: 2}
proof3 := &block.HeaderProof{HeaderHash: []byte{3}, HeaderNonce: 3}
proof4 := &block.HeaderProof{HeaderHash: []byte{4}, HeaderNonce: 4}
proof5 := &block.HeaderProof{HeaderHash: []byte{5}, HeaderNonce: 5}
proof6 := &block.HeaderProof{HeaderHash: []byte{6}, HeaderNonce: 6}

pc := proofscache.NewProofsCache(4)

pc.AddProof(proof5)
pc.AddProof(proof4)
pc.AddProof(proof1)
pc.AddProof(proof2)
pc.AddProof(proof3)
pc.AddProof(proof4)

require.Equal(t, 4, pc.FullProofsByNonceSize())
require.Equal(t, 4, pc.ProofsByHashSize())

pc.AddProof(proof1) // added to new head bucket
pc.AddProof(proof0) // added to new head bucket

require.Equal(t, 1, pc.HeadBucketSize())
require.Equal(t, 5, pc.FullProofsByNonceSize())
require.Equal(t, 5, pc.ProofsByHashSize())

pc.CleanupProofsBehindNonce(5)
pc.CleanupProofsBehindNonce(4)

// cleanup up head bucket with only one proof
require.Equal(t, 4, pc.HeadBucketSize())
require.Equal(t, 4, pc.ProofsByHashSize())
require.Equal(t, 1, pc.ProofsByHashSize())

pc.AddProof(proof6) // added to new head bucket
pc.AddProof(proof5) // added to new head bucket

require.Equal(t, 1, pc.HeadBucketSize())
require.Equal(t, 5, pc.ProofsByHashSize())
require.Equal(t, 2, pc.ProofsByHashSize())

pc.CleanupProofsBehindNonce(5) // will not remove any bucket
require.Equal(t, 1, pc.HeadBucketSize())
require.Equal(t, 5, pc.FullProofsByNonceSize())
require.Equal(t, 5, pc.ProofsByHashSize())
require.Equal(t, 2, pc.FullProofsByNonceSize())
require.Equal(t, 2, pc.ProofsByHashSize())

pc.CleanupProofsBehindNonce(10)
require.Equal(t, 0, pc.HeadBucketSize())
require.Equal(t, 0, pc.ProofsByHashSize())
})
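
The updated assertions follow from the fixed nonce windows (bucketSize 4 here); roughly:

// bucket [0,3]: proof0..proof3    bucket [4,7]: proof4, proof5
// CleanupProofsBehindNonce(4): 4 > maxNonce 3 of bucket [0,3] -> that bucket is dropped, proof4 stays
// CleanupProofsBehindNonce(5): 5 > maxNonce 5 of bucket [4,7] is false -> nothing is removed
// CleanupProofsBehindNonce(10): the remaining bucket is dropped -> cache is empty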

@@ -109,7 +101,6 @@ func TestProofsCache(t *testing.T) {
pc.AddProof(proof)
}

require.Equal(t, 10, pc.HeadBucketSize())
require.Equal(t, 100, pc.FullProofsByNonceSize())
require.Equal(t, 100, pc.ProofsByHashSize())
