core/state/snapshot: use AddHash/ContainHash instead of Hasher interface (#28849)

This change switches from going through the `Hasher` interface when adding to and querying the bloom filter to computing the 64 bit mini hashes directly and passing them via the `AddHash`/`ContainsHash` methods.
This significantly reduces the allocations for Search and Rebloom.
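
For context, the `holiman/bloomfilter/v2` library exposes both a `hash.Hash64`-based API (`Add`/`Contains`) and a raw 64 bit one (`AddHash`/`ContainsHash`). A minimal sketch of the pattern the code moves to; the filter sizing and sample key below are invented purely for illustration:

```go
package main

import (
	"encoding/binary"
	"fmt"

	bloomfilter "github.com/holiman/bloomfilter/v2"
)

func main() {
	// Illustrative sizing only: m bits, k hash functions.
	filter, err := bloomfilter.New(1<<20, 4)
	if err != nil {
		panic(err)
	}
	key := make([]byte, 32) // stand-in for a trie, account or code hash
	key[0] = 0x42

	// Compute the 64 bit mini hash once and hand the filter a plain uint64.
	// Previously the key was wrapped in a type implementing hash.Hash64 and
	// passed through Add/Contains, which allocates on every call.
	mini := binary.BigEndian.Uint64(key[:8])
	filter.AddHash(mini)
	fmt.Println(filter.ContainsHash(mini)) // true
}
```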
Marius van der Wijden committed via GitHub, 10 months ago
parent 4c8d92d303
commit c89a3da7d9
Changed files:
1. core/state/pruner/bloom.go (21 lines changed)
2. core/state/snapshot/difflayer.go (57 lines changed)

core/state/pruner/bloom.go

@@ -27,17 +27,10 @@ import (
 	bloomfilter "github.com/holiman/bloomfilter/v2"
 )
 
-// stateBloomHasher is a wrapper around a byte blob to satisfy the interface API
-// requirements of the bloom library used. It's used to convert a trie hash or
-// contract code hash into a 64 bit mini hash.
-type stateBloomHasher []byte
-
-func (f stateBloomHasher) Write(p []byte) (n int, err error) { panic("not implemented") }
-func (f stateBloomHasher) Sum(b []byte) []byte               { panic("not implemented") }
-func (f stateBloomHasher) Reset()                            { panic("not implemented") }
-func (f stateBloomHasher) BlockSize() int                    { panic("not implemented") }
-func (f stateBloomHasher) Size() int                         { return 8 }
-func (f stateBloomHasher) Sum64() uint64                     { return binary.BigEndian.Uint64(f) }
+// stateBloomHash is used to convert a trie hash or contract code hash into a 64 bit mini hash.
+func stateBloomHash(f []byte) uint64 {
+	return binary.BigEndian.Uint64(f)
+}
 
 // stateBloom is a bloom filter used during the state conversion(snapshot->state).
 // The keys of all generated entries will be recorded here so that in the pruning

@@ -113,10 +106,10 @@ func (bloom *stateBloom) Put(key []byte, value []byte) error {
 		if !isCode {
 			return errors.New("invalid entry")
 		}
-		bloom.bloom.Add(stateBloomHasher(codeKey))
+		bloom.bloom.AddHash(stateBloomHash(codeKey))
 		return nil
 	}
-	bloom.bloom.Add(stateBloomHasher(key))
+	bloom.bloom.AddHash(stateBloomHash(key))
 	return nil
 }

@@ -128,5 +121,5 @@ func (bloom *stateBloom) Delete(key []byte) error { panic("not supported") }
 //   - If it says yes, the key may be contained
 //   - If it says no, the key is definitely not contained.
 func (bloom *stateBloom) Contain(key []byte) bool {
-	return bloom.bloom.Contains(stateBloomHasher(key))
+	return bloom.bloom.ContainsHash(stateBloomHash(key))
 }
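
Putting the pruner side together, `Put` and `Contain` now work on plain mini hashes end to end. A rough standalone sketch of that flow under the documented bloom semantics (the sizing and keys are invented, and the real code additionally routes contract-code keys through `rawdb.IsCodeKey`):

```go
package main

import (
	"encoding/binary"
	"fmt"

	bloomfilter "github.com/holiman/bloomfilter/v2"
)

// stateBloomHash mirrors the helper above: the first eight bytes of a trie or
// contract code hash, read big-endian. Keys here are always at least eight
// bytes long, otherwise this would panic.
func stateBloomHash(f []byte) uint64 {
	return binary.BigEndian.Uint64(f)
}

func main() {
	filter, _ := bloomfilter.New(1<<20, 4) // invented sizing

	stored := make([]byte, 32)
	stored[7] = 0x01
	unknown := make([]byte, 32)
	unknown[7] = 0x02

	filter.AddHash(stateBloomHash(stored)) // roughly what Put does for a 32 byte state entry

	// Contain semantics as documented: "yes" means may be contained,
	// "no" means definitely not contained.
	fmt.Println(filter.ContainsHash(stateBloomHash(stored)))  // true
	fmt.Println(filter.ContainsHash(stateBloomHash(unknown))) // almost certainly false
}
```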

core/state/snapshot/difflayer.go

@@ -124,47 +124,20 @@ type diffLayer struct {
 	lock sync.RWMutex
 }
 
-// destructBloomHasher is a wrapper around a common.Hash to satisfy the interface
-// API requirements of the bloom library used. It's used to convert a destruct
-// event into a 64 bit mini hash.
-type destructBloomHasher common.Hash
-
-func (h destructBloomHasher) Write(p []byte) (n int, err error) { panic("not implemented") }
-func (h destructBloomHasher) Sum(b []byte) []byte               { panic("not implemented") }
-func (h destructBloomHasher) Reset()                            { panic("not implemented") }
-func (h destructBloomHasher) BlockSize() int                    { panic("not implemented") }
-func (h destructBloomHasher) Size() int                         { return 8 }
-func (h destructBloomHasher) Sum64() uint64 {
+// destructBloomHash is used to convert a destruct event into a 64 bit mini hash.
+func destructBloomHash(h common.Hash) uint64 {
 	return binary.BigEndian.Uint64(h[bloomDestructHasherOffset : bloomDestructHasherOffset+8])
 }
 
-// accountBloomHasher is a wrapper around a common.Hash to satisfy the interface
-// API requirements of the bloom library used. It's used to convert an account
-// hash into a 64 bit mini hash.
-type accountBloomHasher common.Hash
-
-func (h accountBloomHasher) Write(p []byte) (n int, err error) { panic("not implemented") }
-func (h accountBloomHasher) Sum(b []byte) []byte               { panic("not implemented") }
-func (h accountBloomHasher) Reset()                            { panic("not implemented") }
-func (h accountBloomHasher) BlockSize() int                    { panic("not implemented") }
-func (h accountBloomHasher) Size() int                         { return 8 }
-func (h accountBloomHasher) Sum64() uint64 {
+// accountBloomHash is used to convert an account hash into a 64 bit mini hash.
+func accountBloomHash(h common.Hash) uint64 {
 	return binary.BigEndian.Uint64(h[bloomAccountHasherOffset : bloomAccountHasherOffset+8])
 }
 
-// storageBloomHasher is a wrapper around a [2]common.Hash to satisfy the interface
-// API requirements of the bloom library used. It's used to convert an account
-// hash into a 64 bit mini hash.
-type storageBloomHasher [2]common.Hash
-
-func (h storageBloomHasher) Write(p []byte) (n int, err error) { panic("not implemented") }
-func (h storageBloomHasher) Sum(b []byte) []byte               { panic("not implemented") }
-func (h storageBloomHasher) Reset()                            { panic("not implemented") }
-func (h storageBloomHasher) BlockSize() int                    { panic("not implemented") }
-func (h storageBloomHasher) Size() int                         { return 8 }
-func (h storageBloomHasher) Sum64() uint64 {
-	return binary.BigEndian.Uint64(h[0][bloomStorageHasherOffset:bloomStorageHasherOffset+8]) ^
-		binary.BigEndian.Uint64(h[1][bloomStorageHasherOffset:bloomStorageHasherOffset+8])
+// storageBloomHash is used to convert an account hash and a storage hash into a 64 bit mini hash.
+func storageBloomHash(h0, h1 common.Hash) uint64 {
+	return binary.BigEndian.Uint64(h0[bloomStorageHasherOffset:bloomStorageHasherOffset+8]) ^
+		binary.BigEndian.Uint64(h1[bloomStorageHasherOffset:bloomStorageHasherOffset+8])
 }
 
 // newDiffLayer creates a new diff on top of an existing snapshot, whether that's a low

@@ -233,14 +206,14 @@ func (dl *diffLayer) rebloom(origin *diskLayer) {
 	}
 	// Iterate over all the accounts and storage slots and index them
 	for hash := range dl.destructSet {
-		dl.diffed.Add(destructBloomHasher(hash))
+		dl.diffed.AddHash(destructBloomHash(hash))
 	}
 	for hash := range dl.accountData {
-		dl.diffed.Add(accountBloomHasher(hash))
+		dl.diffed.AddHash(accountBloomHash(hash))
 	}
 	for accountHash, slots := range dl.storageData {
 		for storageHash := range slots {
-			dl.diffed.Add(storageBloomHasher{accountHash, storageHash})
+			dl.diffed.AddHash(storageBloomHash(accountHash, storageHash))
 		}
 	}
 	// Calculate the current false positive rate and update the error rate meter.

@@ -301,9 +274,9 @@ func (dl *diffLayer) AccountRLP(hash common.Hash) ([]byte, error) {
 	}
 	// Check the bloom filter first whether there's even a point in reaching into
 	// all the maps in all the layers below
-	hit := dl.diffed.Contains(accountBloomHasher(hash))
+	hit := dl.diffed.ContainsHash(accountBloomHash(hash))
 	if !hit {
-		hit = dl.diffed.Contains(destructBloomHasher(hash))
+		hit = dl.diffed.ContainsHash(destructBloomHash(hash))
 	}
 	var origin *diskLayer
 	if !hit {

@@ -372,9 +345,9 @@ func (dl *diffLayer) Storage(accountHash, storageHash common.Hash) ([]byte, erro
 		dl.lock.RUnlock()
 		return nil, ErrSnapshotStale
 	}
-	hit := dl.diffed.Contains(storageBloomHasher{accountHash, storageHash})
+	hit := dl.diffed.ContainsHash(storageBloomHash(accountHash, storageHash))
 	if !hit {
-		hit = dl.diffed.Contains(destructBloomHasher(accountHash))
+		hit = dl.diffed.ContainsHash(destructBloomHash(accountHash))
 	}
 	var origin *diskLayer
 	if !hit {
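
The allocation claim can be sanity-checked with a throwaway benchmark. This is only a sketch, not part of the change: the wrapper type below imitates the deleted `*BloomHasher` helpers, and the benchmark names and filter sizing are made up:

```go
package bloomalloc

import (
	"encoding/binary"
	"hash"
	"testing"

	bloomfilter "github.com/holiman/bloomfilter/v2"
)

// byteHasher imitates the removed wrapper types: a byte slice masquerading as
// a hash.Hash64 so it can be fed to the interface-based Add/Contains API.
type byteHasher []byte

func (h byteHasher) Write(p []byte) (int, error) { panic("not implemented") }
func (h byteHasher) Sum(b []byte) []byte         { panic("not implemented") }
func (h byteHasher) Reset()                      { panic("not implemented") }
func (h byteHasher) BlockSize() int              { panic("not implemented") }
func (h byteHasher) Size() int                   { return 8 }
func (h byteHasher) Sum64() uint64               { return binary.BigEndian.Uint64(h) }

var _ hash.Hash64 = byteHasher(nil)

func BenchmarkAddInterface(b *testing.B) {
	filter, _ := bloomfilter.New(1<<20, 4)
	key := make([]byte, 32)
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		binary.BigEndian.PutUint64(key, uint64(i))
		filter.Add(byteHasher(key)) // old style: the key escapes through the interface
	}
}

func BenchmarkAddHash(b *testing.B) {
	filter, _ := bloomfilter.New(1<<20, 4)
	key := make([]byte, 32)
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		binary.BigEndian.PutUint64(key, uint64(i))
		filter.AddHash(binary.BigEndian.Uint64(key)) // new style: plain uint64, no per-call allocation
	}
}
```

Running `go test -bench . -benchmem` against such a file should show allocs/op dropping to zero for the `AddHash` variant, which is the effect the commit message describes for Search and Rebloom.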
