pow: only support prime calculations on Go 1.8 and above

pull/3776/head
Péter Szilágyi 8 years ago committed by Felix Lange
parent 023670f6ba
commit df72e20cc5
  1. 8
      pow/ethash.go
  2. 37
      pow/ethash_algo.go
  3. 47
      pow/ethash_algo_go1.7.go
  4. 57
      pow/ethash_algo_go1.8.go
  5. 46
      pow/ethash_algo_go1.8_test.go
  6. 25
      pow/ethash_algo_test.go

@ -35,6 +35,7 @@ import (
) )
var ( var (
ErrNonceOutOfRange = errors.New("nonce out of range")
ErrInvalidDifficulty = errors.New("non-positive difficulty") ErrInvalidDifficulty = errors.New("non-positive difficulty")
ErrInvalidMixDigest = errors.New("invalid mix digest") ErrInvalidMixDigest = errors.New("invalid mix digest")
ErrInvalidPoW = errors.New("pow difficulty invalid") ErrInvalidPoW = errors.New("pow difficulty invalid")
@ -174,13 +175,18 @@ func NewSharedEthash() PoW {
// Verify implements PoW, checking whether the given block satisfies the PoW // Verify implements PoW, checking whether the given block satisfies the PoW
// difficulty requirements. // difficulty requirements.
func (ethash *Ethash) Verify(block Block) error { func (ethash *Ethash) Verify(block Block) error {
// Sanity check that the block number is below the lookup table size (60M blocks)
number := block.NumberU64()
if number/epochLength >= uint64(len(cacheSizes)) {
// Go < 1.8 cannot calculate new cache/dataset sizes (no fast prime check)
return ErrNonceOutOfRange
}
// Ensure we have a valid difficulty for the block // Ensure we have a valid difficulty for the block
difficulty := block.Difficulty() difficulty := block.Difficulty()
if difficulty.Sign() <= 0 { if difficulty.Sign() <= 0 {
return ErrInvalidDifficulty return ErrInvalidDifficulty
} }
// Recompute the digest and PoW value and verify against the block // Recompute the digest and PoW value and verify against the block
number := block.NumberU64()
cache := ethash.cache(number) cache := ethash.cache(number)
size := datasetSize(number) size := datasetSize(number)

@ -19,7 +19,6 @@ package pow
import ( import (
"encoding/binary" "encoding/binary"
"io" "io"
"math/big"
"runtime" "runtime"
"sync" "sync"
"sync/atomic" "sync/atomic"
@ -45,42 +44,6 @@ const (
loopAccesses = 64 // Number of accesses in hashimoto loop loopAccesses = 64 // Number of accesses in hashimoto loop
) )
// cacheSize calculates and returns the size of the ethash verification cache that
// belongs to a certain block number. The cache size grows linearly, however, we
// always take the highest prime below the linearly growing threshold in order to
// reduce the risk of accidental regularities leading to cyclic behavior.
func cacheSize(block uint64) uint64 {
	// If we have a pre-generated value, use that
	epoch := int(block / epochLength)
	if epoch < len(cacheSizes) {
		return cacheSizes[epoch]
	}
	// No known cache size, calculate manually (sanity branch only)
	// NOTE(review): `cacheGrowthBytes*epoch` is int arithmetic; the go1.8
	// replacement converts with uint64(epoch) — confirm 32-bit overflow risk.
	size := uint64(cacheInitBytes + cacheGrowthBytes*epoch - hashBytes)
	// Step the size down by 2*hashBytes until the hash-item count is prime.
	// NOTE(review): the accuracy claim below holds for ProbablyPrime only on
	// Go >= 1.8 (Baillie-PSW); on older toolchains this is probabilistic.
	for !new(big.Int).SetUint64(size / hashBytes).ProbablyPrime(1) { // Always accurate for n < 2^64
		size -= 2 * hashBytes
	}
	return size
}
// datasetSize calculates and returns the size of the ethash mining dataset that
// belongs to a certain block number. The dataset size grows linearly, however, we
// always take the highest prime below the linearly growing threshold in order to
// reduce the risk of accidental regularities leading to cyclic behavior.
func datasetSize(block uint64) uint64 {
	// If we have a pre-generated value, use that
	epoch := int(block / epochLength)
	if epoch < len(datasetSizes) {
		return datasetSizes[epoch]
	}
	// No known dataset size, calculate manually (sanity branch only)
	// NOTE(review): `datasetGrowthBytes*epoch` is int arithmetic; the go1.8
	// replacement converts with uint64(epoch) — confirm 32-bit overflow risk.
	size := uint64(datasetInitBytes + datasetGrowthBytes*epoch - mixBytes)
	// Step the size down by 2*mixBytes until the mix-item count is prime.
	// NOTE(review): the accuracy claim below holds for ProbablyPrime only on
	// Go >= 1.8 (Baillie-PSW); on older toolchains this is probabilistic.
	for !new(big.Int).SetUint64(size / mixBytes).ProbablyPrime(1) { // Always accurate for n < 2^64
		size -= 2 * mixBytes
	}
	return size
}
// seedHash is the seed to use for generating a verification cache and the mining // seedHash is the seed to use for generating a verification cache and the mining
// dataset. // dataset.
func seedHash(block uint64) []byte { func seedHash(block uint64) []byte {

@ -0,0 +1,47 @@
// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
// +build !go1.8
package pow
// cacheSize returns the size of the ethash verification cache belonging to a
// given block number. Only pregenerated epochs are supported on this toolchain:
// computing a fresh size needs a fast, accurate primality test, which math/big
// only provides from Go 1.8 onwards.
func cacheSize(block uint64) uint64 {
	// Serve straight from the pregenerated lookup table when possible.
	if epoch := int(block / epochLength); epoch < len(cacheSizes) {
		return cacheSizes[epoch]
	}
	// Past the table there is no safe way to derive the size pre-1.8.
	panic("fast prime testing unsupported in Go < 1.8")
}
// datasetSize returns the size of the ethash mining dataset belonging to a
// given block number. Only pregenerated epochs are supported on this toolchain:
// computing a fresh size needs a fast, accurate primality test, which math/big
// only provides from Go 1.8 onwards.
func datasetSize(block uint64) uint64 {
	// Serve straight from the pregenerated lookup table when possible.
	if epoch := int(block / epochLength); epoch < len(datasetSizes) {
		return datasetSizes[epoch]
	}
	// Past the table there is no safe way to derive the size pre-1.8.
	panic("fast prime testing unsupported in Go < 1.8")
}

@ -0,0 +1,57 @@
// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
// +build go1.8
package pow
import "math/big"
// cacheSize calculates and returns the size of the ethash verification cache
// belonging to a given block number. The size grows linearly per epoch, but the
// returned value is always the largest byte count below that threshold whose
// hash-item count is prime, reducing the risk of accidental regularities
// leading to cyclic behavior.
func cacheSize(block uint64) uint64 {
	// Fast path: the common epochs are pregenerated in a lookup table.
	epoch := int(block / epochLength)
	if epoch < len(cacheSizes) {
		return cacheSizes[epoch]
	}
	// Slow path (sanity branch only): start at the linear threshold and walk
	// downwards in 2*hashBytes steps until the item count turns prime.
	size := uint64(cacheInitBytes + cacheGrowthBytes*uint64(epoch) - hashBytes)
	for {
		if new(big.Int).SetUint64(size / hashBytes).ProbablyPrime(1) { // Always accurate for n < 2^64
			return size
		}
		size -= 2 * hashBytes
	}
}
// datasetSize calculates and returns the size of the ethash mining dataset
// belonging to a given block number. The size grows linearly per epoch, but the
// returned value is always the largest byte count below that threshold whose
// mix-item count is prime, reducing the risk of accidental regularities
// leading to cyclic behavior.
func datasetSize(block uint64) uint64 {
	// Fast path: the common epochs are pregenerated in a lookup table.
	epoch := int(block / epochLength)
	if epoch < len(datasetSizes) {
		return datasetSizes[epoch]
	}
	// Slow path (sanity branch only): start at the linear threshold and walk
	// downwards in 2*mixBytes steps until the item count turns prime.
	size := uint64(datasetInitBytes + datasetGrowthBytes*uint64(epoch) - mixBytes)
	for {
		if new(big.Int).SetUint64(size / mixBytes).ProbablyPrime(1) { // Always accurate for n < 2^64
			return size
		}
		size -= 2 * mixBytes
	}
}

@ -0,0 +1,46 @@
// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
// +build go1.8
package pow
import "testing"
// TestSizeCalculations cross-checks the hard-coded cache and dataset size
// lookup tables against the values produced by the on-demand calculators.
func TestSizeCalculations(t *testing.T) {
	// Empty the pregenerated tables so the calculator branches run, restoring
	// the originals when the test finishes.
	defer func(sizes []uint64) { cacheSizes = sizes }(cacheSizes)
	defer func(sizes []uint64) { datasetSizes = sizes }(datasetSizes)

	wantCaches, wantDatasets := cacheSizes, datasetSizes
	cacheSizes, datasetSizes = []uint64{}, []uint64{}

	// Every table entry must be reproduced by the calculator.
	for i, want := range wantCaches {
		if have := cacheSize(uint64(i*epochLength) + 1); have != want {
			t.Errorf("cache %d: cache size mismatch: have %d, want %d", i, have, want)
		}
	}
	for i, want := range wantDatasets {
		if have := datasetSize(uint64(i*epochLength) + 1); have != want {
			t.Errorf("dataset %d: dataset size mismatch: have %d, want %d", i, have, want)
		}
	}
}

@ -23,31 +23,6 @@ import (
"github.com/ethereum/go-ethereum/common/hexutil" "github.com/ethereum/go-ethereum/common/hexutil"
) )
// Tests whether the dataset size calculators work correctly by cross checking the
// hard coded lookup table with the value generated by it.
func TestSizeCalculations(t *testing.T) {
	var tests []uint64

	// Verify all the cache sizes from the lookup table.
	// The table is swapped for an empty one so cacheSize falls through to its
	// calculator branch; the deferred closure restores it afterwards.
	defer func(sizes []uint64) { cacheSizes = sizes }(cacheSizes)
	tests, cacheSizes = cacheSizes, []uint64{}

	for i, test := range tests {
		if size := cacheSize(uint64(i*epochLength) + 1); size != test {
			t.Errorf("cache %d: cache size mismatch: have %d, want %d", i, size, test)
		}
	}
	// Verify all the dataset sizes from the lookup table (same swap trick).
	defer func(sizes []uint64) { datasetSizes = sizes }(datasetSizes)
	tests, datasetSizes = datasetSizes, []uint64{}

	for i, test := range tests {
		if size := datasetSize(uint64(i*epochLength) + 1); size != test {
			t.Errorf("dataset %d: dataset size mismatch: have %d, want %d", i, size, test)
		}
	}
}
// Tests that verification caches can be correctly generated. // Tests that verification caches can be correctly generated.
func TestCacheGeneration(t *testing.T) { func TestCacheGeneration(t *testing.T) {
tests := []struct { tests := []struct {

Loading…
Cancel
Save