From ba295ec6feb8288bb1e0cacca4ed2c3c5515d133 Mon Sep 17 00:00:00 2001 From: Jason Carver Date: Thu, 21 May 2015 08:50:01 -0700 Subject: [PATCH 01/34] Log locally mined blocks, after they are 5-deep in the chain This helps determine which blocks are unlikely to end up as uncles * Store the 5 most recent locally mined block numbers * On every imported block, check if the 5-deep block num is in that store * Also confirm that the block is signed with miner's coinbase Why not just check the coinbase? This log is useful if you're running multiple miners and want to know if *this* miner is performing well. --- miner/worker.go | 59 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/miner/worker.go b/miner/worker.go index 5e4ff7510f..5b4af64cb4 100644 --- a/miner/worker.go +++ b/miner/worker.go @@ -38,6 +38,13 @@ type Agent interface { GetHashRate() int64 } +const MINING_LOG_AT_DEPTH = 5 + +type UInt64RingBuffer struct { + ints []uint64 //array of all integers in buffer + next int //where is the next insertion? assert 0 <= next < len(ints) +} + // environment is the workers current environment and holds // all of the current state information type environment struct { @@ -54,6 +61,7 @@ type environment struct { lowGasTransactors *set.Set ownedAccounts *set.Set lowGasTxs types.Transactions + localMinedBlocks *UInt64RingBuffer // the most recent block numbers that were mined locally (used to check block inclusion) } // env returns a new environment for the current cycle @@ -209,6 +217,18 @@ out: events.Unsubscribe() } +func newLocalMinedBlock(blockNumber uint64, prevMinedBlocks *UInt64RingBuffer) (minedBlocks *UInt64RingBuffer) { + if prevMinedBlocks == nil { + minedBlocks = &UInt64RingBuffer{next: 0, ints: make([]uint64, MINING_LOG_AT_DEPTH)} + } else { + minedBlocks = prevMinedBlocks + } + + minedBlocks.ints[minedBlocks.next] = blockNumber + minedBlocks.next = (minedBlocks.next + 1) % len(minedBlocks.ints) + return minedBlocks +} + func (self *worker) wait() { for { for block := range self.recv { @@ -232,6 +252,8 @@ func (self *worker) wait() { glog.V(logger.Info).Infof("🔨 Mined %sblock #%v (%x)", stale, block.Number(), block.Hash().Bytes()[:4]) + self.current.localMinedBlocks = newLocalMinedBlock(block.Number().Uint64(), self.current.localMinedBlocks) + jsonlogger.LogJson(&logger.EthMinerNewBlock{ BlockHash: block.Hash().Hex(), BlockNumber: block.Number(), @@ -286,6 +308,9 @@ func (self *worker) makeCurrent() { current.ignoredTransactors = set.New() current.lowGasTransactors = set.New() current.ownedAccounts = accountAddressesSet(accounts) + if self.current != nil { + current.localMinedBlocks = self.current.localMinedBlocks + } parent := self.chain.GetBlock(current.block.ParentHash()) current.coinbase.SetGasPool(core.CalcGasLimit(parent)) @@ -304,6 +329,38 @@ func (w *worker) setGasPrice(p *big.Int) { w.mux.Post(core.GasPriceChanged{w.gasPrice}) } +func (self *worker) isBlockLocallyMined(deepBlockNum uint64) bool { + //Did this instance mine a block at {deepBlockNum} ? + var isLocal = false + for idx, blockNum := range self.current.localMinedBlocks.ints { + if deepBlockNum == blockNum { + isLocal = true + self.current.localMinedBlocks.ints[idx] = 0 //prevent showing duplicate logs + break + } + } + //Short-circuit on false, because the previous and following tests must both be true + if !isLocal { + return false + } + + //Does the block at {deepBlockNum} send earnings to my coinbase? 
+ var block = self.chain.GetBlockByNumber(deepBlockNum) + return block.Header().Coinbase == self.coinbase +} + +func (self *worker) logLocalMinedBlocks(previous *environment) { + if previous != nil && self.current.localMinedBlocks != nil { + nextBlockNum := self.current.block.Number().Uint64() + for checkBlockNum := previous.block.Number().Uint64(); checkBlockNum < nextBlockNum; checkBlockNum++ { + inspectBlockNum := checkBlockNum - MINING_LOG_AT_DEPTH + if self.isBlockLocallyMined(inspectBlockNum) { + glog.V(logger.Info).Infof("🔨 🔗 Mined %d blocks back: block #%v", MINING_LOG_AT_DEPTH, inspectBlockNum) + } + } + } +} + func (self *worker) commitNewWork() { self.mu.Lock() defer self.mu.Unlock() @@ -312,6 +369,7 @@ func (self *worker) commitNewWork() { self.currentMu.Lock() defer self.currentMu.Unlock() + previous := self.current self.makeCurrent() current := self.current @@ -347,6 +405,7 @@ func (self *worker) commitNewWork() { // We only care about logging if we're actually mining if atomic.LoadInt32(&self.mining) == 1 { glog.V(logger.Info).Infof("commit new work on block %v with %d txs & %d uncles\n", current.block.Number(), current.tcount, len(uncles)) + self.logLocalMinedBlocks(previous) } for _, hash := range badUncles { From 8a7fb5fd342ee9d4c2e8609d4c008f12c5956a41 Mon Sep 17 00:00:00 2001 From: Jason Carver Date: Sat, 23 May 2015 12:00:18 -0700 Subject: [PATCH 02/34] do not export constant for when to log a deep block you mined --- miner/worker.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/miner/worker.go b/miner/worker.go index 5b4af64cb4..937e98e438 100644 --- a/miner/worker.go +++ b/miner/worker.go @@ -38,7 +38,7 @@ type Agent interface { GetHashRate() int64 } -const MINING_LOG_AT_DEPTH = 5 +const miningLogAtDepth = 5 type UInt64RingBuffer struct { ints []uint64 //array of all integers in buffer @@ -219,7 +219,7 @@ out: func newLocalMinedBlock(blockNumber uint64, prevMinedBlocks *UInt64RingBuffer) (minedBlocks *UInt64RingBuffer) { if prevMinedBlocks == nil { - minedBlocks = &UInt64RingBuffer{next: 0, ints: make([]uint64, MINING_LOG_AT_DEPTH)} + minedBlocks = &UInt64RingBuffer{next: 0, ints: make([]uint64, miningLogAtDepth)} } else { minedBlocks = prevMinedBlocks } @@ -353,9 +353,9 @@ func (self *worker) logLocalMinedBlocks(previous *environment) { if previous != nil && self.current.localMinedBlocks != nil { nextBlockNum := self.current.block.Number().Uint64() for checkBlockNum := previous.block.Number().Uint64(); checkBlockNum < nextBlockNum; checkBlockNum++ { - inspectBlockNum := checkBlockNum - MINING_LOG_AT_DEPTH + inspectBlockNum := checkBlockNum - miningLogAtDepth if self.isBlockLocallyMined(inspectBlockNum) { - glog.V(logger.Info).Infof("🔨 🔗 Mined %d blocks back: block #%v", MINING_LOG_AT_DEPTH, inspectBlockNum) + glog.V(logger.Info).Infof("🔨 🔗 Mined %d blocks back: block #%v", miningLogAtDepth, inspectBlockNum) } } } From f1ce5877badf5624a5fc5214dc18086b930c8d38 Mon Sep 17 00:00:00 2001 From: Jason Carver Date: Sat, 23 May 2015 12:04:00 -0700 Subject: [PATCH 03/34] do not export ring buffer struct --- miner/worker.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/miner/worker.go b/miner/worker.go index 937e98e438..6f618d632d 100644 --- a/miner/worker.go +++ b/miner/worker.go @@ -40,7 +40,7 @@ type Agent interface { const miningLogAtDepth = 5 -type UInt64RingBuffer struct { +type uint64RingBuffer struct { ints []uint64 //array of all integers in buffer next int //where is the next insertion? 
assert 0 <= next < len(ints) } @@ -61,7 +61,7 @@ type environment struct { lowGasTransactors *set.Set ownedAccounts *set.Set lowGasTxs types.Transactions - localMinedBlocks *UInt64RingBuffer // the most recent block numbers that were mined locally (used to check block inclusion) + localMinedBlocks *uint64RingBuffer // the most recent block numbers that were mined locally (used to check block inclusion) } // env returns a new environment for the current cycle @@ -217,9 +217,9 @@ out: events.Unsubscribe() } -func newLocalMinedBlock(blockNumber uint64, prevMinedBlocks *UInt64RingBuffer) (minedBlocks *UInt64RingBuffer) { +func newLocalMinedBlock(blockNumber uint64, prevMinedBlocks *uint64RingBuffer) (minedBlocks *uint64RingBuffer) { if prevMinedBlocks == nil { - minedBlocks = &UInt64RingBuffer{next: 0, ints: make([]uint64, miningLogAtDepth)} + minedBlocks = &uint64RingBuffer{next: 0, ints: make([]uint64, miningLogAtDepth)} } else { minedBlocks = prevMinedBlocks } From 9253fc337e4f36029f90f31b1b4e116d0a77ae05 Mon Sep 17 00:00:00 2001 From: Felix Lange Date: Wed, 27 May 2015 00:52:02 +0200 Subject: [PATCH 04/34] cmd/geth: exit the console cleanly when interrupted This fix applies mostly to unsupported terminals that do not trigger the special interrupt handling in liner. Supported terminals were covered because liner.Prompt returns an error if Ctrl-C is pressed. --- cmd/geth/js.go | 64 ++++++++++++++++++++++++++++++++++---------------- 1 file changed, 44 insertions(+), 20 deletions(-) diff --git a/cmd/geth/js.go b/cmd/geth/js.go index 0fb234d455..706bc6554f 100644 --- a/cmd/geth/js.go +++ b/cmd/geth/js.go @@ -22,6 +22,7 @@ import ( "fmt" "math/big" "os" + "os/signal" "path/filepath" "strings" @@ -47,7 +48,8 @@ type dumbterm struct{ r *bufio.Reader } func (r dumbterm) Prompt(p string) (string, error) { fmt.Print(p) - return r.r.ReadString('\n') + line, err := r.r.ReadString('\n') + return strings.TrimSuffix(line, "\n"), err } func (r dumbterm) PasswordPrompt(p string) (string, error) { @@ -182,30 +184,52 @@ func (self *jsre) exec(filename string) error { } func (self *jsre) interactive() { - for { - input, err := self.Prompt(self.ps1) - if err != nil { - break + // Read input lines. + prompt := make(chan string) + inputln := make(chan string) + go func() { + defer close(inputln) + for { + line, err := self.Prompt(<-prompt) + if err != nil { + return + } + inputln <- line } - if input == "" { - continue + }() + // Wait for Ctrl-C, too. 
+ sig := make(chan os.Signal, 1) + signal.Notify(sig, os.Interrupt) + + defer func() { + if self.atexit != nil { + self.atexit() } - str += input + "\n" - self.setIndent() - if indentCount <= 0 { - if input == "exit" { - break + self.re.Stop(false) + }() + for { + prompt <- self.ps1 + select { + case <-sig: + fmt.Println("caught interrupt, exiting") + return + case input, ok := <-inputln: + if !ok || indentCount <= 0 && input == "exit" { + return + } + if input == "" { + continue + } + str += input + "\n" + self.setIndent() + if indentCount <= 0 { + hist := str[:len(str)-1] + self.AppendHistory(hist) + self.parseInput(str) + str = "" } - hist := str[:len(str)-1] - self.AppendHistory(hist) - self.parseInput(str) - str = "" } } - if self.atexit != nil { - self.atexit() - } - self.re.Stop(false) } func (self *jsre) withHistory(op func(*os.File)) { From 6019f1bb0a0744ffa52bf1ab93309c1a6dd9063c Mon Sep 17 00:00:00 2001 From: Jason Carver Date: Tue, 26 May 2015 18:54:56 -0700 Subject: [PATCH 05/34] deep-mining-log: only track non-stale blocks if you track stale blocks, then you quickly overflow your ring buffer in the local network case where you're mining every block and generating a lot of stales. --- miner/worker.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/miner/worker.go b/miner/worker.go index 6f618d632d..fac2e65683 100644 --- a/miner/worker.go +++ b/miner/worker.go @@ -248,12 +248,12 @@ func (self *worker) wait() { canonBlock := self.chain.GetBlockByNumber(block.NumberU64()) if canonBlock != nil && canonBlock.Hash() != block.Hash() { stale = "stale-" + } else { + self.current.localMinedBlocks = newLocalMinedBlock(block.Number().Uint64(), self.current.localMinedBlocks) } glog.V(logger.Info).Infof("🔨 Mined %sblock #%v (%x)", stale, block.Number(), block.Hash().Bytes()[:4]) - self.current.localMinedBlocks = newLocalMinedBlock(block.Number().Uint64(), self.current.localMinedBlocks) - jsonlogger.LogJson(&logger.EthMinerNewBlock{ BlockHash: block.Hash().Hex(), BlockNumber: block.Number(), From de12183d3824c25763e8996c14f8b5a52832fad6 Mon Sep 17 00:00:00 2001 From: Jason Carver Date: Tue, 26 May 2015 18:55:52 -0700 Subject: [PATCH 06/34] deep-mining-log: need ring buffer to be one bigger for all-blocks-mined case --- miner/worker.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/miner/worker.go b/miner/worker.go index fac2e65683..12ed656268 100644 --- a/miner/worker.go +++ b/miner/worker.go @@ -219,7 +219,7 @@ out: func newLocalMinedBlock(blockNumber uint64, prevMinedBlocks *uint64RingBuffer) (minedBlocks *uint64RingBuffer) { if prevMinedBlocks == nil { - minedBlocks = &uint64RingBuffer{next: 0, ints: make([]uint64, miningLogAtDepth)} + minedBlocks = &uint64RingBuffer{next: 0, ints: make([]uint64, miningLogAtDepth + 1)} } else { minedBlocks = prevMinedBlocks } From 14955bd4542e422daad5c6b39cfe07cdcb86b230 Mon Sep 17 00:00:00 2001 From: Gustav Simonsson Date: Wed, 27 May 2015 13:01:06 +0200 Subject: [PATCH 07/34] Revert "core: block.gasLimit - parent.gasLimit <= parent.gasLimit / GasLimitBoundDivisor" This reverts commit be2b0501b5832c0b49f07cdf2db597cc34450199. 
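With the revert applied, the rule ValidateHeader enforces is again the strict bound |block.GasLimit - parent.GasLimit| < parent.GasLimit / GasLimitBoundDivisor, with block.GasLimit also required to stay at or above MinGasLimit. Restated as a minimal standalone helper (illustrative sketch only, mirroring the hunk below; assumes "math/big" and the go-ethereum params package are imported):

    // gasLimitValid restates the post-revert check from ValidateHeader:
    // the child gas limit may differ from the parent's by strictly less
    // than parent.GasLimit / params.GasLimitBoundDivisor, and may not
    // drop below params.MinGasLimit.
    func gasLimitValid(parentGL, blockGL *big.Int) bool {
            diff := new(big.Int).Sub(blockGL, parentGL)
            diff.Abs(diff)
            bound := new(big.Int).Div(parentGL, params.GasLimitBoundDivisor)
            return diff.Cmp(bound) < 0 && blockGL.Cmp(params.MinGasLimit) >= 0
    }
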
--- core/block_processor.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/block_processor.go b/core/block_processor.go index e808c03da0..0377824079 100644 --- a/core/block_processor.go +++ b/core/block_processor.go @@ -302,7 +302,7 @@ func (sm *BlockProcessor) ValidateHeader(block, parent *types.Header, checkPow b a := new(big.Int).Sub(block.GasLimit, parent.GasLimit) a.Abs(a) b := new(big.Int).Div(parent.GasLimit, params.GasLimitBoundDivisor) - if !(a.Cmp(b) <= 0) || (block.GasLimit.Cmp(params.MinGasLimit) == -1) { + if !(a.Cmp(b) < 0) || (block.GasLimit.Cmp(params.MinGasLimit) == -1) { return fmt.Errorf("GasLimit check failed for block %v (%v > %v)", block.GasLimit, a, b) } From bf5f0b1d0cf9207c8958646f6ac16ffbaf89d7fa Mon Sep 17 00:00:00 2001 From: Gustav Simonsson Date: Wed, 27 May 2015 13:30:24 +0200 Subject: [PATCH 08/34] Update ValidateHeader comments --- core/block_processor.go | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/core/block_processor.go b/core/block_processor.go index 0377824079..e064cdd802 100644 --- a/core/block_processor.go +++ b/core/block_processor.go @@ -285,9 +285,8 @@ func (self *BlockProcessor) GetBlockReceipts(bhash common.Hash) (receipts types. } -// Validates the current block. Returns an error if the block was invalid, -// an uncle or anything that isn't on the current block chain. -// Validation validates easy over difficult (dagger takes longer time = difficult) +// See YP section 4.3.4. "Block Header Validity" +// Validates a block. Returns an error if the block is invalid. func (sm *BlockProcessor) ValidateHeader(block, parent *types.Header, checkPow bool) error { if big.NewInt(int64(len(block.Extra))).Cmp(params.MaximumExtraDataSize) == 1 { return fmt.Errorf("Block extra data too long (%d)", len(block.Extra)) @@ -298,7 +297,6 @@ func (sm *BlockProcessor) ValidateHeader(block, parent *types.Header, checkPow b return fmt.Errorf("Difficulty check failed for block %v, %v", block.Difficulty, expd) } - // block.gasLimit - parent.gasLimit <= parent.gasLimit / GasLimitBoundDivisor a := new(big.Int).Sub(block.GasLimit, parent.GasLimit) a.Abs(a) b := new(big.Int).Div(parent.GasLimit, params.GasLimitBoundDivisor) From 12650e16d3aa453a65417a79d79af5ce98cc4b01 Mon Sep 17 00:00:00 2001 From: obscuren Date: Wed, 27 May 2015 13:08:06 +0200 Subject: [PATCH 09/34] core, miner: fixed miner time issue and removed future blocks * Miner should no longer generate blocks with a time stamp less or equal than it's parent. * Future blocks are no longer processed and queued directly. 
Closes #1118 --- core/block_processor.go | 3 +-- miner/worker.go | 7 ++++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/core/block_processor.go b/core/block_processor.go index 0377824079..454c40e27a 100644 --- a/core/block_processor.go +++ b/core/block_processor.go @@ -306,8 +306,7 @@ func (sm *BlockProcessor) ValidateHeader(block, parent *types.Header, checkPow b return fmt.Errorf("GasLimit check failed for block %v (%v > %v)", block.GasLimit, a, b) } - // Allow future blocks up to 10 seconds - if int64(block.Time) > time.Now().Unix()+4 { + if int64(block.Time) > time.Now().Unix() { return BlockFutureErr } diff --git a/miner/worker.go b/miner/worker.go index 12ed656268..182b993981 100644 --- a/miner/worker.go +++ b/miner/worker.go @@ -287,8 +287,10 @@ func (self *worker) push() { func (self *worker) makeCurrent() { block := self.chain.NewBlock(self.coinbase) - if block.Time() == self.chain.CurrentBlock().Time() { - block.Header().Time++ + parent := self.chain.GetBlock(block.ParentHash()) + + if block.Time() <= parent.Time() { + block.Header().Time = parent.Header().Time + 1 } block.Header().Extra = self.extra @@ -312,7 +314,6 @@ func (self *worker) makeCurrent() { current.localMinedBlocks = self.current.localMinedBlocks } - parent := self.chain.GetBlock(current.block.ParentHash()) current.coinbase.SetGasPool(core.CalcGasLimit(parent)) self.current = current From 912ae80350c72a9cbefe60969fc9c88b1db302f3 Mon Sep 17 00:00:00 2001 From: obscuren Date: Wed, 27 May 2015 13:16:36 +0200 Subject: [PATCH 10/34] miner: Added 5 blocks wait in prep for #1067 --- miner/worker.go | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/miner/worker.go b/miner/worker.go index 182b993981..bc69551696 100644 --- a/miner/worker.go +++ b/miner/worker.go @@ -219,7 +219,7 @@ out: func newLocalMinedBlock(blockNumber uint64, prevMinedBlocks *uint64RingBuffer) (minedBlocks *uint64RingBuffer) { if prevMinedBlocks == nil { - minedBlocks = &uint64RingBuffer{next: 0, ints: make([]uint64, miningLogAtDepth + 1)} + minedBlocks = &uint64RingBuffer{next: 0, ints: make([]uint64, miningLogAtDepth+1)} } else { minedBlocks = prevMinedBlocks } @@ -244,15 +244,16 @@ func (self *worker) wait() { } self.mux.Post(core.NewMinedBlockEvent{block}) - var stale string + var stale, confirm string canonBlock := self.chain.GetBlockByNumber(block.NumberU64()) if canonBlock != nil && canonBlock.Hash() != block.Hash() { - stale = "stale-" + stale = "stale " } else { + confirm = "Wait 5 blocks for confirmation" self.current.localMinedBlocks = newLocalMinedBlock(block.Number().Uint64(), self.current.localMinedBlocks) } - glog.V(logger.Info).Infof("🔨 Mined %sblock #%v (%x)", stale, block.Number(), block.Hash().Bytes()[:4]) + glog.V(logger.Info).Infof("🔨 Mined %sblock (#%v / %x). 
%s", stale, block.Number(), block.Hash().Bytes()[:4], confirm) jsonlogger.LogJson(&logger.EthMinerNewBlock{ BlockHash: block.Hash().Hex(), From 3f91ee4ff824b38b7775f4e9f51a4160f5edc19d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A9ter=20Szil=C3=A1gyi?= Date: Wed, 27 May 2015 16:46:46 +0300 Subject: [PATCH 11/34] cmd/geth: expand admin.progress() to something meaningful --- cmd/geth/admin.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cmd/geth/admin.go b/cmd/geth/admin.go index 8f9a009d74..f0be444c68 100644 --- a/cmd/geth/admin.go +++ b/cmd/geth/admin.go @@ -262,8 +262,8 @@ func (js *jsre) setHead(call otto.FunctionCall) otto.Value { } func (js *jsre) downloadProgress(call otto.FunctionCall) otto.Value { - current, max := js.ethereum.Downloader().Stats() - v, _ := call.Otto.ToValue(fmt.Sprintf("%d/%d", current, max)) + pending, cached := js.ethereum.Downloader().Stats() + v, _ := call.Otto.ToValue(map[string]interface{}{"pending": pending, "cached": cached}) return v } From 759571681604e95b876a03e274c9588529ab204e Mon Sep 17 00:00:00 2001 From: obscuren Date: Wed, 27 May 2015 17:01:28 +0200 Subject: [PATCH 12/34] core: adjust gas calculation --- core/chain_manager.go | 1 + 1 file changed, 1 insertion(+) diff --git a/core/chain_manager.go b/core/chain_manager.go index ec479db25c..ee73145c10 100644 --- a/core/chain_manager.go +++ b/core/chain_manager.go @@ -69,6 +69,7 @@ func CalcGasLimit(parent *types.Block) *big.Int { gl := new(big.Int).Sub(parent.GasLimit(), decay) gl = gl.Add(gl, contrib) + gl = gl.Add(gl, big.NewInt(1)) gl = common.BigMax(gl, params.MinGasLimit) if gl.Cmp(params.GenesisGasLimit) < 0 { From 5235e01b8dac36a847723fc83cfd5ff65c903215 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A9ter=20Szil=C3=A1gyi?= Date: Wed, 27 May 2015 18:58:51 +0300 Subject: [PATCH 13/34] eth: hard disconnect if a peer is flaky --- eth/handler.go | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/eth/handler.go b/eth/handler.go index 777a9c7c0e..8092a5f714 100644 --- a/eth/handler.go +++ b/eth/handler.go @@ -93,14 +93,22 @@ func NewProtocolManager(protocolVersion, networkId int, mux *event.TypeMux, txpo } func (pm *ProtocolManager) removePeer(id string) { - // Unregister the peer from the downloader - pm.downloader.UnregisterPeer(id) + // Short circuit if the peer was already removed + peer := pm.peers.Peer(id) + if peer == nil { + return + } + glog.V(logger.Debug).Infoln("Removing peer", id) - // Remove the peer from the Ethereum peer set too - glog.V(logger.Detail).Infoln("Removing peer", id) + // Unregister the peer from the downloader and Ethereum peer set + pm.downloader.UnregisterPeer(id) if err := pm.peers.Unregister(id); err != nil { glog.V(logger.Error).Infoln("Removal failed:", err) } + // Hard disconnect at the networking layer + if peer != nil { + peer.Peer.Disconnect(p2p.DiscUselessPeer) + } } func (pm *ProtocolManager) Start() { From 020006a8ede0463346ad3d4ae318d299eee63fb1 Mon Sep 17 00:00:00 2001 From: obscuren Date: Wed, 27 May 2015 18:03:16 +0200 Subject: [PATCH 14/34] common, ethdb: removed caching and LastTD --- common/db.go | 1 - ethdb/database.go | 64 +++++------------------------------------------ 2 files changed, 6 insertions(+), 59 deletions(-) diff --git a/common/db.go b/common/db.go index ae13c75573..c12a2cfb03 100644 --- a/common/db.go +++ b/common/db.go @@ -5,7 +5,6 @@ type Database interface { Put(key []byte, value []byte) Get(key []byte) ([]byte, error) Delete(key []byte) error - LastKnownTD() []byte Close() 
Flush() error } diff --git a/ethdb/database.go b/ethdb/database.go index 9bf09467b4..019645cedd 100644 --- a/ethdb/database.go +++ b/ethdb/database.go @@ -1,8 +1,6 @@ package ethdb import ( - "sync" - "github.com/ethereum/go-ethereum/compression/rle" "github.com/ethereum/go-ethereum/logger" "github.com/ethereum/go-ethereum/logger/glog" @@ -15,14 +13,10 @@ import ( var OpenFileLimit = 64 type LDBDatabase struct { + // filename for reporting fn string - - mu sync.Mutex + // LevelDB instance db *leveldb.DB - - queue map[string][]byte - - quit chan struct{} } // NewLDBDatabase returns a LevelDB wrapped object. LDBDatabase does not persist data by @@ -40,85 +34,39 @@ func NewLDBDatabase(file string) (*LDBDatabase, error) { return nil, err } database := &LDBDatabase{ - fn: file, - db: db, - quit: make(chan struct{}), + fn: file, + db: db, } - database.makeQueue() return database, nil } -func (self *LDBDatabase) makeQueue() { - self.queue = make(map[string][]byte) -} - // Put puts the given key / value to the queue func (self *LDBDatabase) Put(key []byte, value []byte) { - self.mu.Lock() - defer self.mu.Unlock() - - self.queue[string(key)] = value + self.db.Put(key, rle.Compress(value), nil) } // Get returns the given key if it's present. func (self *LDBDatabase) Get(key []byte) ([]byte, error) { - self.mu.Lock() - defer self.mu.Unlock() - - // Check queue first - if dat, ok := self.queue[string(key)]; ok { - return dat, nil - } - dat, err := self.db.Get(key, nil) if err != nil { return nil, err } - return rle.Decompress(dat) } // Delete deletes the key from the queue and database func (self *LDBDatabase) Delete(key []byte) error { - self.mu.Lock() - defer self.mu.Unlock() - - // make sure it's not in the queue - delete(self.queue, string(key)) - return self.db.Delete(key, nil) } -func (self *LDBDatabase) LastKnownTD() []byte { - data, _ := self.Get([]byte("LTD")) - - if len(data) == 0 { - data = []byte{0x0} - } - - return data -} - func (self *LDBDatabase) NewIterator() iterator.Iterator { return self.db.NewIterator(nil, nil) } // Flush flushes out the queue to leveldb func (self *LDBDatabase) Flush() error { - self.mu.Lock() - defer self.mu.Unlock() - - batch := new(leveldb.Batch) - - for key, value := range self.queue { - batch.Put([]byte(key), rle.Compress(value)) - } - self.makeQueue() // reset the queue - - glog.V(logger.Detail).Infoln("Flush database: ", self.fn) - - return self.db.Write(batch, nil) + return nil } func (self *LDBDatabase) Close() { From e3253b5d5e65bfb6944ddaabd3c79400fbe06ef8 Mon Sep 17 00:00:00 2001 From: obscuren Date: Fri, 22 May 2015 22:44:51 +0200 Subject: [PATCH 15/34] core: fixed an issue with storing receipts --- cmd/mist/assets/examples/coin.html | 2 +- core/block_processor.go | 73 ++++++++++++++++++++---------- core/block_processor_test.go | 32 +++++++++++++ core/state/log.go | 21 ++++++--- core/types/receipt.go | 29 ++++++++++++ 5 files changed, 124 insertions(+), 33 deletions(-) diff --git a/cmd/mist/assets/examples/coin.html b/cmd/mist/assets/examples/coin.html index e6baf45793..4fe8e7fa2a 100644 --- a/cmd/mist/assets/examples/coin.html +++ b/cmd/mist/assets/examples/coin.html @@ -102,7 +102,7 @@ window.filter = filter; var amount = parseInt( value.value ); console.log("transact: ", to.value, " => ", amount) - contract.sendTransaction({from: eth.accounts[0]}).send( to.value, amount ); + contract.send.sendTransaction(to.value, amount ,{from: eth.accounts[0]}); to.value = ""; value.value = ""; diff --git a/core/block_processor.go b/core/block_processor.go index 
6cd1c8aa35..ca205ee863 100644 --- a/core/block_processor.go +++ b/core/block_processor.go @@ -40,11 +40,6 @@ type BlockProcessor struct { txpool *TxPool - // The last attempted block is mainly used for debugging purposes - // This does not have to be a valid block and will be set during - // 'Process' & canonical validation. - lastAttemptedBlock *types.Block - events event.Subscription eventMux *event.TypeMux @@ -188,8 +183,6 @@ func (sm *BlockProcessor) Process(block *types.Block) (logs state.Logs, err erro } func (sm *BlockProcessor) processWithParent(block, parent *types.Block) (logs state.Logs, err error) { - sm.lastAttemptedBlock = block - // Create a new state based on the parent's root (e.g., create copy) state := state.New(parent.Root(), sm.db) @@ -255,6 +248,12 @@ func (sm *BlockProcessor) processWithParent(block, parent *types.Block) (logs st return } + // store the receipts + err = putReceipts(sm.extraDb, block.Hash(), receipts) + if err != nil { + return nil, err + } + // Calculate the td for this block //td = CalculateTD(block, parent) // Sync the current block's state to the database @@ -268,23 +267,9 @@ func (sm *BlockProcessor) processWithParent(block, parent *types.Block) (logs st putTx(sm.extraDb, tx, block, uint64(i)) } - receiptsRlp := block.Receipts().RlpEncode() - sm.extraDb.Put(append(receiptsPre, block.Hash().Bytes()...), receiptsRlp) - return state.Logs(), nil } -func (self *BlockProcessor) GetBlockReceipts(bhash common.Hash) (receipts types.Receipts, err error) { - var rdata []byte - rdata, err = self.extraDb.Get(append(receiptsPre, bhash[:]...)) - - if err == nil { - err = rlp.DecodeBytes(rdata, &receipts) - } - return - -} - // See YP section 4.3.4. "Block Header Validity" // Validates a block. Returns an error if the block is invalid. func (sm *BlockProcessor) ValidateHeader(block, parent *types.Header, checkPow bool) error { @@ -391,13 +376,25 @@ func (sm *BlockProcessor) VerifyUncles(statedb *state.StateDB, block, parent *ty return nil } +// GetBlockReceipts returns the receipts beloniging to the block hash +func (sm *BlockProcessor) GetBlockReceipts(bhash common.Hash) (receipts types.Receipts, err error) { + return getBlockReceipts(sm.extraDb, bhash) +} + +// GetLogs returns the logs of the given block. This method is using a two step approach +// where it tries to get it from the (updated) method which gets them from the receipts or +// the depricated way by re-processing the block. func (sm *BlockProcessor) GetLogs(block *types.Block) (logs state.Logs, err error) { - if !sm.bc.HasBlock(block.Header().ParentHash) { - return nil, ParentError(block.Header().ParentHash) + receipts, err := sm.GetBlockReceipts(block.Hash()) + if err == nil && len(receipts) > 0 { + // coalesce logs + for _, receipt := range receipts { + logs = append(logs, receipt.Logs()...) 
+ } + return } - sm.lastAttemptedBlock = block - + // TODO: remove backward compatibility var ( parent = sm.bc.GetBlock(block.Header().ParentHash) state = state.New(parent.Root(), sm.db) @@ -408,6 +405,16 @@ func (sm *BlockProcessor) GetLogs(block *types.Block) (logs state.Logs, err erro return state.Logs(), nil } +func getBlockReceipts(db common.Database, bhash common.Hash) (receipts types.Receipts, err error) { + var rdata []byte + rdata, err = db.Get(append(receiptsPre, bhash[:]...)) + + if err == nil { + err = rlp.DecodeBytes(rdata, &receipts) + } + return +} + func putTx(db common.Database, tx *types.Transaction, block *types.Block, i uint64) { rlpEnc, err := rlp.EncodeToBytes(tx) if err != nil { @@ -431,3 +438,19 @@ func putTx(db common.Database, tx *types.Transaction, block *types.Block, i uint } db.Put(append(tx.Hash().Bytes(), 0x0001), rlpMeta) } + +func putReceipts(db common.Database, hash common.Hash, receipts types.Receipts) error { + storageReceipts := make([]*types.ReceiptForStorage, len(receipts)) + for i, receipt := range receipts { + storageReceipts[i] = (*types.ReceiptForStorage)(receipt) + } + + bytes, err := rlp.EncodeToBytes(storageReceipts) + if err != nil { + return err + } + + db.Put(append(receiptsPre, hash[:]...), bytes) + + return nil +} diff --git a/core/block_processor_test.go b/core/block_processor_test.go index e0aa5fb4c4..72b173a71d 100644 --- a/core/block_processor_test.go +++ b/core/block_processor_test.go @@ -5,6 +5,8 @@ import ( "testing" "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/state" + "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/ethdb" "github.com/ethereum/go-ethereum/event" "github.com/ethereum/go-ethereum/pow/ezp" @@ -35,3 +37,33 @@ func TestNumber(t *testing.T) { t.Errorf("didn't expect block number error") } } + +func TestPutReceipt(t *testing.T) { + db, _ := ethdb.NewMemDatabase() + + var addr common.Address + addr[0] = 1 + var hash common.Hash + hash[0] = 2 + + receipt := new(types.Receipt) + receipt.SetLogs(state.Logs{&state.Log{ + Address: addr, + Topics: []common.Hash{hash}, + Data: []byte("hi"), + Number: 42, + TxHash: hash, + TxIndex: 0, + BlockHash: hash, + Index: 0, + }}) + + putReceipts(db, hash, types.Receipts{receipt}) + receipts, err := getBlockReceipts(db, hash) + if err != nil { + t.Error("got err:", err) + } + if len(receipts) != 1 { + t.Error("expected to get 1 receipt, got", len(receipts)) + } +} diff --git a/core/state/log.go b/core/state/log.go index a7aa784e28..8829770612 100644 --- a/core/state/log.go +++ b/core/state/log.go @@ -29,15 +29,22 @@ func (self *Log) EncodeRLP(w io.Writer) error { } func (self *Log) String() string { - return fmt.Sprintf(`log: %x %x %x`, self.Address, self.Topics, self.Data) + return fmt.Sprintf(`log: %x %x %x %x %d %x %d`, self.Address, self.Topics, self.Data, self.TxHash, self.TxIndex, self.BlockHash, self.Index) } type Logs []*Log -func (self Logs) String() (ret string) { - for _, log := range self { - ret += fmt.Sprintf("%v", log) - } - - return "[" + ret + "]" +type LogForStorage Log + +func (self *LogForStorage) EncodeRLP(w io.Writer) error { + return rlp.Encode(w, []interface{}{ + self.Address, + self.Topics, + self.Data, + self.Number, + self.TxHash, + self.TxIndex, + self.BlockHash, + self.Index, + }) } diff --git a/core/types/receipt.go b/core/types/receipt.go index 414e4d364d..6b4024adae 100644 --- a/core/types/receipt.go +++ b/core/types/receipt.go @@ -26,10 +26,39 @@ func (self *Receipt) SetLogs(logs state.Logs) { 
self.logs = logs } +func (self *Receipt) Logs() state.Logs { + return self.logs +} + func (self *Receipt) EncodeRLP(w io.Writer) error { return rlp.Encode(w, []interface{}{self.PostState, self.CumulativeGasUsed, self.Bloom, self.logs}) } +func (self *Receipt) DecodeRLP(s *rlp.Stream) error { + var r struct { + PostState []byte + CumulativeGasUsed *big.Int + Bloom Bloom + Logs state.Logs + } + if err := s.Decode(&r); err != nil { + return err + } + self.PostState, self.CumulativeGasUsed, self.Bloom, self.logs = r.PostState, r.CumulativeGasUsed, r.Bloom, r.Logs + + return nil +} + +type ReceiptForStorage Receipt + +func (self *ReceiptForStorage) EncodeRLP(w io.Writer) error { + storageLogs := make([]*state.LogForStorage, len(self.logs)) + for i, log := range self.logs { + storageLogs[i] = (*state.LogForStorage)(log) + } + return rlp.Encode(w, []interface{}{self.PostState, self.CumulativeGasUsed, self.Bloom, storageLogs}) +} + func (self *Receipt) RlpEncode() []byte { bytes, err := rlp.EncodeToBytes(self) if err != nil { From 3b9808f23ca4eb1621a92aad80de5c89269f17fe Mon Sep 17 00:00:00 2001 From: Felix Lange Date: Wed, 27 May 2015 13:29:34 +0200 Subject: [PATCH 16/34] cmd/geth, cmd/utils: don't use Ethereum for import, export and upgradedb The blockchain commands don't need the full stack. With this change, p2p, miner, downloader, etc are no longer started for blockchain operations. --- cmd/geth/main.go | 113 +++++++++++++-------------------------------- cmd/utils/cmd.go | 49 +++++++++----------- cmd/utils/flags.go | 44 ++++++++---------- 3 files changed, 72 insertions(+), 134 deletions(-) diff --git a/cmd/geth/main.go b/cmd/geth/main.go index 0cbf8e41a2..f849063fae 100644 --- a/cmd/geth/main.go +++ b/cmd/geth/main.go @@ -41,6 +41,7 @@ import ( "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/eth" "github.com/ethereum/go-ethereum/logger" + "github.com/ethereum/go-ethereum/logger/glog" "github.com/mattn/go-colorable" "github.com/mattn/go-isatty" ) @@ -282,17 +283,12 @@ JavaScript API. See https://github.com/ethereum/go-ethereum/wiki/Javascipt-Conso utils.SolcPathFlag, } app.Before = func(ctx *cli.Context) error { + utils.SetupLogger(ctx) if ctx.GlobalBool(utils.PProfEanbledFlag.Name) { utils.StartPProf(ctx) } return nil } - - // missing: - // flag.StringVar(&ConfigFile, "conf", defaultConfigFile, "config file") - // flag.BoolVar(&DiffTool, "difftool", false, "creates output for diff'ing. Sets LogLevel=0") - // flag.StringVar(&DiffType, "diff", "all", "sets the level of diff output [vm, all]. 
Has no effect if difftool=false") - } func main() { @@ -516,53 +512,25 @@ func importchain(ctx *cli.Context) { if len(ctx.Args()) != 1 { utils.Fatalf("This command requires an argument.") } - - cfg := utils.MakeEthConfig(ClientIdentifier, Version, ctx) - cfg.SkipBcVersionCheck = true - - ethereum, err := eth.New(cfg) - if err != nil { - utils.Fatalf("%v\n", err) - } - - chainmgr := ethereum.ChainManager() + chain, blockDB, stateDB, extraDB := utils.GetChain(ctx) start := time.Now() - err = utils.ImportChain(chainmgr, ctx.Args().First()) - if err != nil { + if err := utils.ImportChain(chain, ctx.Args().First()); err != nil { utils.Fatalf("Import error: %v\n", err) } - - // force database flush - ethereum.BlockDb().Flush() - ethereum.StateDb().Flush() - ethereum.ExtraDb().Flush() - + flushAll(blockDB, stateDB, extraDB) fmt.Printf("Import done in %v", time.Since(start)) - - return } func exportchain(ctx *cli.Context) { if len(ctx.Args()) != 1 { utils.Fatalf("This command requires an argument.") } - - cfg := utils.MakeEthConfig(ClientIdentifier, nodeNameVersion, ctx) - cfg.SkipBcVersionCheck = true - - ethereum, err := eth.New(cfg) - if err != nil { - utils.Fatalf("%v\n", err) - } - - chainmgr := ethereum.ChainManager() + chain, _, _, _ := utils.GetChain(ctx) start := time.Now() - err = utils.ExportChain(chainmgr, ctx.Args().First()) - if err != nil { + if err := utils.ExportChain(chain, ctx.Args().First()); err != nil { utils.Fatalf("Export error: %v\n", err) } fmt.Printf("Export done in %v", time.Since(start)) - return } func removeDb(ctx *cli.Context) { @@ -585,76 +553,54 @@ func removeDb(ctx *cli.Context) { } func upgradeDb(ctx *cli.Context) { - fmt.Println("Upgrade blockchain DB") + glog.Infoln("Upgrading blockchain database") - cfg := utils.MakeEthConfig(ClientIdentifier, Version, ctx) - cfg.SkipBcVersionCheck = true - - ethereum, err := eth.New(cfg) - if err != nil { - utils.Fatalf("%v\n", err) - } - - v, _ := ethereum.BlockDb().Get([]byte("BlockchainVersion")) + chain, blockDB, stateDB, extraDB := utils.GetChain(ctx) + v, _ := blockDB.Get([]byte("BlockchainVersion")) bcVersion := int(common.NewValue(v).Uint()) - if bcVersion == 0 { bcVersion = core.BlockChainVersion } + // Export the current chain. filename := fmt.Sprintf("blockchain_%d_%s.chain", bcVersion, time.Now().Format("20060102_150405")) exportFile := filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), filename) - - err = utils.ExportChain(ethereum.ChainManager(), exportFile) - if err != nil { + if err := utils.ExportChain(chain, exportFile); err != nil { utils.Fatalf("Unable to export chain for reimport %s\n", err) } - - ethereum.BlockDb().Close() - ethereum.StateDb().Close() - ethereum.ExtraDb().Close() - + flushAll(blockDB, stateDB, extraDB) os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "blockchain")) os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "state")) - ethereum, err = eth.New(cfg) - if err != nil { - utils.Fatalf("%v\n", err) - } - - ethereum.BlockDb().Put([]byte("BlockchainVersion"), common.NewValue(core.BlockChainVersion).Bytes()) - - err = utils.ImportChain(ethereum.ChainManager(), exportFile) + // Import the chain file. 
+ chain, blockDB, stateDB, extraDB = utils.GetChain(ctx) + blockDB.Put([]byte("BlockchainVersion"), common.NewValue(core.BlockChainVersion).Bytes()) + err := utils.ImportChain(chain, exportFile) + flushAll(blockDB, stateDB, extraDB) if err != nil { utils.Fatalf("Import error %v (a backup is made in %s, use the import command to import it)\n", err, exportFile) + } else { + os.Remove(exportFile) + glog.Infoln("Import finished") } - - // force database flush - ethereum.BlockDb().Flush() - ethereum.StateDb().Flush() - ethereum.ExtraDb().Flush() - - os.Remove(exportFile) - - fmt.Println("Import finished") } func dump(ctx *cli.Context) { - chainmgr, _, stateDb := utils.GetChain(ctx) + chain, _, stateDB, _ := utils.GetChain(ctx) for _, arg := range ctx.Args() { var block *types.Block if hashish(arg) { - block = chainmgr.GetBlock(common.HexToHash(arg)) + block = chain.GetBlock(common.HexToHash(arg)) } else { num, _ := strconv.Atoi(arg) - block = chainmgr.GetBlockByNumber(uint64(num)) + block = chain.GetBlockByNumber(uint64(num)) } if block == nil { fmt.Println("{}") utils.Fatalf("block not found") } else { - statedb := state.New(block.Root(), stateDb) - fmt.Printf("%s\n", statedb.Dump()) + state := state.New(block.Root(), stateDB) + fmt.Printf("%s\n", state.Dump()) } } } @@ -707,3 +653,10 @@ func hashish(x string) bool { _, err := strconv.Atoi(x) return err != nil } + +func flushAll(dbs ...common.Database) { + for _, db := range dbs { + db.Flush() + db.Close() + } +} diff --git a/cmd/utils/cmd.go b/cmd/utils/cmd.go index 39b4e46da2..550ac1c514 100644 --- a/cmd/utils/cmd.go +++ b/cmd/utils/cmd.go @@ -167,7 +167,7 @@ func FormatTransactionData(data string) []byte { } func ImportChain(chainmgr *core.ChainManager, fn string) error { - fmt.Printf("importing blockchain '%s'\n", fn) + glog.Infoln("Importing blockchain", fn) fh, err := os.OpenFile(fn, os.O_RDONLY, os.ModePerm) if err != nil { return err @@ -176,43 +176,36 @@ func ImportChain(chainmgr *core.ChainManager, fn string) error { chainmgr.Reset() stream := rlp.NewStream(fh, 0) - var i, n int batchSize := 2500 blocks := make(types.Blocks, batchSize) - - for ; ; i++ { - var b types.Block - if err := stream.Decode(&b); err == io.EOF { - break - } else if err != nil { - return fmt.Errorf("at block %d: %v", i, err) - } - - blocks[n] = &b - n++ - - if n == batchSize { - if _, err := chainmgr.InsertChain(blocks); err != nil { - return fmt.Errorf("invalid block %v", err) + n := 0 + for { + // Load a batch of RLP blocks. + i := 0 + for ; i < batchSize; i++ { + var b types.Block + if err := stream.Decode(&b); err == io.EOF { + break + } else if err != nil { + return fmt.Errorf("at block %d: %v", n, err) } - n = 0 - blocks = make(types.Blocks, batchSize) + blocks[i] = &b + n++ } - } - - if n > 0 { - if _, err := chainmgr.InsertChain(blocks[:n]); err != nil { - return fmt.Errorf("invalid block %v", err) + if i == 0 { + break + } + // Import the batch. 
+ if _, err := chainmgr.InsertChain(blocks[:i]); err != nil { + return fmt.Errorf("invalid block %d: %v", n, err) } } - - fmt.Printf("imported %d blocks\n", i) return nil } func ExportChain(chainmgr *core.ChainManager, fn string) error { - fmt.Printf("exporting blockchain '%s'\n", fn) + glog.Infoln("Exporting blockchain to", fn) fh, err := os.OpenFile(fn, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, os.ModePerm) if err != nil { return err @@ -221,6 +214,6 @@ func ExportChain(chainmgr *core.ChainManager, fn string) error { if err := chainmgr.Export(fh); err != nil { return err } - fmt.Printf("exported blockchain\n") + glog.Infoln("Exported blockchain to", fn) return nil } diff --git a/cmd/utils/flags.go b/cmd/utils/flags.go index 155110ddce..176a546f1d 100644 --- a/cmd/utils/flags.go +++ b/cmd/utils/flags.go @@ -283,20 +283,10 @@ func GetNodeKey(ctx *cli.Context) (key *ecdsa.PrivateKey) { } func MakeEthConfig(clientID, version string, ctx *cli.Context) *eth.Config { - // Set verbosity on glog - glog.SetV(ctx.GlobalInt(VerbosityFlag.Name)) - glog.CopyStandardLogTo("INFO") - // Set the log type - //glog.SetToStderr(ctx.GlobalBool(LogToStdErrFlag.Name)) - glog.SetToStderr(true) - // Set the log dir - glog.SetLogDir(ctx.GlobalString(LogFileFlag.Name)) - customName := ctx.GlobalString(IdentityFlag.Name) if len(customName) > 0 { clientID += "/" + customName } - return ð.Config{ Name: common.MakeName(clientID, version), DataDir: ctx.GlobalString(DataDirFlag.Name), @@ -327,32 +317,34 @@ func MakeEthConfig(clientID, version string, ctx *cli.Context) *eth.Config { } } -func GetChain(ctx *cli.Context) (*core.ChainManager, common.Database, common.Database) { - dataDir := ctx.GlobalString(DataDirFlag.Name) +// SetupLogger configures glog from the logging-related command line flags. 
+func SetupLogger(ctx *cli.Context) { + glog.SetV(ctx.GlobalInt(VerbosityFlag.Name)) + glog.CopyStandardLogTo("INFO") + glog.SetToStderr(true) + glog.SetLogDir(ctx.GlobalString(LogFileFlag.Name)) +} - blockDb, err := ethdb.NewLDBDatabase(filepath.Join(dataDir, "blockchain")) - if err != nil { +func GetChain(ctx *cli.Context) (chain *core.ChainManager, blockDB, stateDB, extraDB common.Database) { + dd := ctx.GlobalString(DataDirFlag.Name) + var err error + if blockDB, err = ethdb.NewLDBDatabase(filepath.Join(dd, "blockchain")); err != nil { Fatalf("Could not open database: %v", err) } - - stateDb, err := ethdb.NewLDBDatabase(filepath.Join(dataDir, "state")) - if err != nil { + if stateDB, err = ethdb.NewLDBDatabase(filepath.Join(dd, "state")); err != nil { Fatalf("Could not open database: %v", err) } - - extraDb, err := ethdb.NewLDBDatabase(filepath.Join(dataDir, "extra")) - if err != nil { + if extraDB, err = ethdb.NewLDBDatabase(filepath.Join(dd, "extra")); err != nil { Fatalf("Could not open database: %v", err) } eventMux := new(event.TypeMux) pow := ethash.New() - chainManager := core.NewChainManager(blockDb, stateDb, pow, eventMux) - txPool := core.NewTxPool(eventMux, chainManager.State, chainManager.GasLimit) - blockProcessor := core.NewBlockProcessor(stateDb, extraDb, pow, txPool, chainManager, eventMux) - chainManager.SetProcessor(blockProcessor) - - return chainManager, blockDb, stateDb + chain = core.NewChainManager(blockDB, stateDB, pow, eventMux) + txpool := core.NewTxPool(eventMux, chain.State, chain.GasLimit) + proc := core.NewBlockProcessor(stateDB, extraDB, pow, txpool, chain, eventMux) + chain.SetProcessor(proc) + return chain, blockDB, stateDB, extraDB } func GetAccountManager(ctx *cli.Context) *accounts.Manager { From 62671c93c4672a8853c1193286d59f60e7065bbe Mon Sep 17 00:00:00 2001 From: Felix Lange Date: Wed, 27 May 2015 13:34:14 +0200 Subject: [PATCH 17/34] cmd/mist: use utils.SetupLogger --- cmd/mist/main.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/cmd/mist/main.go b/cmd/mist/main.go index 8365fcec15..e8ae9b5e1c 100644 --- a/cmd/mist/main.go +++ b/cmd/mist/main.go @@ -86,6 +86,10 @@ func init() { utils.BlockchainVersionFlag, utils.NetworkIdFlag, } + app.Before = func(ctx *cli.Context) error { + utils.SetupLogger(ctx) + return nil + } } func main() { From 651030c98d0173db272aaee814c99f0a664d992b Mon Sep 17 00:00:00 2001 From: Felix Lange Date: Wed, 27 May 2015 13:43:49 +0200 Subject: [PATCH 18/34] cmd/geth: move blockchain commands to chaincmd.go --- cmd/geth/{blocktest.go => blocktestcmd.go} | 2 +- cmd/geth/chaincmd.go | 159 +++++++++++++++++++++ cmd/geth/main.go | 153 +------------------- 3 files changed, 167 insertions(+), 147 deletions(-) rename cmd/geth/{blocktest.go => blocktestcmd.go} (98%) create mode 100644 cmd/geth/chaincmd.go diff --git a/cmd/geth/blocktest.go b/cmd/geth/blocktestcmd.go similarity index 98% rename from cmd/geth/blocktest.go rename to cmd/geth/blocktestcmd.go index 81a64b5f24..f4dcb0286c 100644 --- a/cmd/geth/blocktest.go +++ b/cmd/geth/blocktestcmd.go @@ -12,7 +12,7 @@ import ( "github.com/ethereum/go-ethereum/tests" ) -var blocktestCmd = cli.Command{ +var blocktestCommand = cli.Command{ Action: runBlockTest, Name: "blocktest", Usage: `loads a block test file`, diff --git a/cmd/geth/chaincmd.go b/cmd/geth/chaincmd.go new file mode 100644 index 0000000000..e17d1dc9b0 --- /dev/null +++ b/cmd/geth/chaincmd.go @@ -0,0 +1,159 @@ +package main + +import ( + "fmt" + "os" + "path/filepath" + "strconv" + "time" + + 
"github.com/codegangsta/cli" + "github.com/ethereum/go-ethereum/cmd/utils" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core" + "github.com/ethereum/go-ethereum/core/state" + "github.com/ethereum/go-ethereum/core/types" + "github.com/ethereum/go-ethereum/logger/glog" +) + +var ( + importCommand = cli.Command{ + Action: importChain, + Name: "import", + Usage: `import a blockchain file`, + } + exportCommand = cli.Command{ + Action: exportChain, + Name: "export", + Usage: `export blockchain into file`, + } + upgradedbCommand = cli.Command{ + Action: upgradeDB, + Name: "upgradedb", + Usage: "upgrade chainblock database", + } + removedbCommand = cli.Command{ + Action: removeDB, + Name: "removedb", + Usage: "Remove blockchain and state databases", + } + dumpCommand = cli.Command{ + Action: dump, + Name: "dump", + Usage: `dump a specific block from storage`, + Description: ` +The arguments are interpreted as block numbers or hashes. +Use "ethereum dump 0" to dump the genesis block. +`, + } +) + +func importChain(ctx *cli.Context) { + if len(ctx.Args()) != 1 { + utils.Fatalf("This command requires an argument.") + } + chain, blockDB, stateDB, extraDB := utils.GetChain(ctx) + start := time.Now() + if err := utils.ImportChain(chain, ctx.Args().First()); err != nil { + utils.Fatalf("Import error: %v\n", err) + } + flushAll(blockDB, stateDB, extraDB) + fmt.Printf("Import done in %v", time.Since(start)) +} + +func exportChain(ctx *cli.Context) { + if len(ctx.Args()) != 1 { + utils.Fatalf("This command requires an argument.") + } + chain, _, _, _ := utils.GetChain(ctx) + start := time.Now() + if err := utils.ExportChain(chain, ctx.Args().First()); err != nil { + utils.Fatalf("Export error: %v\n", err) + } + fmt.Printf("Export done in %v", time.Since(start)) +} + +func removeDB(ctx *cli.Context) { + confirm, err := utils.PromptConfirm("Remove local databases?") + if err != nil { + utils.Fatalf("%v", err) + } + + if confirm { + fmt.Println("Removing chain and state databases...") + start := time.Now() + + os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "blockchain")) + os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "state")) + + fmt.Printf("Removed in %v\n", time.Since(start)) + } else { + fmt.Println("Operation aborted") + } +} + +func upgradeDB(ctx *cli.Context) { + glog.Infoln("Upgrading blockchain database") + + chain, blockDB, stateDB, extraDB := utils.GetChain(ctx) + v, _ := blockDB.Get([]byte("BlockchainVersion")) + bcVersion := int(common.NewValue(v).Uint()) + if bcVersion == 0 { + bcVersion = core.BlockChainVersion + } + + // Export the current chain. + filename := fmt.Sprintf("blockchain_%d_%s.chain", bcVersion, time.Now().Format("20060102_150405")) + exportFile := filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), filename) + if err := utils.ExportChain(chain, exportFile); err != nil { + utils.Fatalf("Unable to export chain for reimport %s\n", err) + } + flushAll(blockDB, stateDB, extraDB) + os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "blockchain")) + os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "state")) + + // Import the chain file. 
+ chain, blockDB, stateDB, extraDB = utils.GetChain(ctx) + blockDB.Put([]byte("BlockchainVersion"), common.NewValue(core.BlockChainVersion).Bytes()) + err := utils.ImportChain(chain, exportFile) + flushAll(blockDB, stateDB, extraDB) + if err != nil { + utils.Fatalf("Import error %v (a backup is made in %s, use the import command to import it)\n", err, exportFile) + } else { + os.Remove(exportFile) + glog.Infoln("Import finished") + } +} + +func dump(ctx *cli.Context) { + chain, _, stateDB, _ := utils.GetChain(ctx) + for _, arg := range ctx.Args() { + var block *types.Block + if hashish(arg) { + block = chain.GetBlock(common.HexToHash(arg)) + } else { + num, _ := strconv.Atoi(arg) + block = chain.GetBlockByNumber(uint64(num)) + } + if block == nil { + fmt.Println("{}") + utils.Fatalf("block not found") + } else { + state := state.New(block.Root(), stateDB) + fmt.Printf("%s\n", state.Dump()) + } + } +} + +// hashish returns true for strings that look like hashes. +func hashish(x string) bool { + _, err := strconv.Atoi(x) + return err != nil +} + +func flushAll(dbs ...common.Database) { + for _, db := range dbs { + db.Flush() + db.Close() + } +} diff --git a/cmd/geth/main.go b/cmd/geth/main.go index f849063fae..9bf0fe6100 100644 --- a/cmd/geth/main.go +++ b/cmd/geth/main.go @@ -24,28 +24,23 @@ import ( "fmt" "io" "io/ioutil" + _ "net/http/pprof" "os" "path/filepath" "runtime" "strconv" "strings" - "time" "github.com/codegangsta/cli" "github.com/ethereum/ethash" "github.com/ethereum/go-ethereum/accounts" "github.com/ethereum/go-ethereum/cmd/utils" "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core" - "github.com/ethereum/go-ethereum/core/state" - "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/eth" "github.com/ethereum/go-ethereum/logger" - "github.com/ethereum/go-ethereum/logger/glog" "github.com/mattn/go-colorable" "github.com/mattn/go-isatty" ) -import _ "net/http/pprof" const ( ClientIdentifier = "Geth" @@ -69,7 +64,12 @@ func init() { app.Action = run app.HideVersion = true // we have a command to print the version app.Commands = []cli.Command{ - blocktestCmd, + blocktestCommand, + importCommand, + exportCommand, + upgradedbCommand, + removedbCommand, + dumpCommand, { Action: makedag, Name: "makedag", @@ -194,15 +194,6 @@ nodes. }, }, }, - { - Action: dump, - Name: "dump", - Usage: `dump a specific block from storage`, - Description: ` -The arguments are interpreted as block numbers or hashes. -Use "ethereum dump 0" to dump the genesis block. -`, - }, { Action: console, Name: "console", @@ -222,26 +213,6 @@ The JavaScript VM exposes a node admin interface as well as the Ðapp JavaScript API. 
See https://github.com/ethereum/go-ethereum/wiki/Javascipt-Console `, }, - { - Action: importchain, - Name: "import", - Usage: `import a blockchain file`, - }, - { - Action: exportchain, - Name: "export", - Usage: `export blockchain into file`, - }, - { - Action: upgradeDb, - Name: "upgradedb", - Usage: "upgrade chainblock database", - }, - { - Action: removeDb, - Name: "removedb", - Usage: "Remove blockchain and state databases", - }, } app.Flags = []cli.Flag{ utils.IdentityFlag, @@ -508,103 +479,6 @@ func accountImport(ctx *cli.Context) { fmt.Printf("Address: %x\n", acct) } -func importchain(ctx *cli.Context) { - if len(ctx.Args()) != 1 { - utils.Fatalf("This command requires an argument.") - } - chain, blockDB, stateDB, extraDB := utils.GetChain(ctx) - start := time.Now() - if err := utils.ImportChain(chain, ctx.Args().First()); err != nil { - utils.Fatalf("Import error: %v\n", err) - } - flushAll(blockDB, stateDB, extraDB) - fmt.Printf("Import done in %v", time.Since(start)) -} - -func exportchain(ctx *cli.Context) { - if len(ctx.Args()) != 1 { - utils.Fatalf("This command requires an argument.") - } - chain, _, _, _ := utils.GetChain(ctx) - start := time.Now() - if err := utils.ExportChain(chain, ctx.Args().First()); err != nil { - utils.Fatalf("Export error: %v\n", err) - } - fmt.Printf("Export done in %v", time.Since(start)) -} - -func removeDb(ctx *cli.Context) { - confirm, err := utils.PromptConfirm("Remove local databases?") - if err != nil { - utils.Fatalf("%v", err) - } - - if confirm { - fmt.Println("Removing chain and state databases...") - start := time.Now() - - os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "blockchain")) - os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "state")) - - fmt.Printf("Removed in %v\n", time.Since(start)) - } else { - fmt.Println("Operation aborted") - } -} - -func upgradeDb(ctx *cli.Context) { - glog.Infoln("Upgrading blockchain database") - - chain, blockDB, stateDB, extraDB := utils.GetChain(ctx) - v, _ := blockDB.Get([]byte("BlockchainVersion")) - bcVersion := int(common.NewValue(v).Uint()) - if bcVersion == 0 { - bcVersion = core.BlockChainVersion - } - - // Export the current chain. - filename := fmt.Sprintf("blockchain_%d_%s.chain", bcVersion, time.Now().Format("20060102_150405")) - exportFile := filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), filename) - if err := utils.ExportChain(chain, exportFile); err != nil { - utils.Fatalf("Unable to export chain for reimport %s\n", err) - } - flushAll(blockDB, stateDB, extraDB) - os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "blockchain")) - os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "state")) - - // Import the chain file. 
- chain, blockDB, stateDB, extraDB = utils.GetChain(ctx) - blockDB.Put([]byte("BlockchainVersion"), common.NewValue(core.BlockChainVersion).Bytes()) - err := utils.ImportChain(chain, exportFile) - flushAll(blockDB, stateDB, extraDB) - if err != nil { - utils.Fatalf("Import error %v (a backup is made in %s, use the import command to import it)\n", err, exportFile) - } else { - os.Remove(exportFile) - glog.Infoln("Import finished") - } -} - -func dump(ctx *cli.Context) { - chain, _, stateDB, _ := utils.GetChain(ctx) - for _, arg := range ctx.Args() { - var block *types.Block - if hashish(arg) { - block = chain.GetBlock(common.HexToHash(arg)) - } else { - num, _ := strconv.Atoi(arg) - block = chain.GetBlockByNumber(uint64(num)) - } - if block == nil { - fmt.Println("{}") - utils.Fatalf("block not found") - } else { - state := state.New(block.Root(), stateDB) - fmt.Printf("%s\n", state.Dump()) - } - } -} - func makedag(ctx *cli.Context) { args := ctx.Args() wrongArgs := func() { @@ -647,16 +521,3 @@ func version(c *cli.Context) { fmt.Printf("GOPATH=%s\n", os.Getenv("GOPATH")) fmt.Printf("GOROOT=%s\n", runtime.GOROOT()) } - -// hashish returns true for strings that look like hashes. -func hashish(x string) bool { - _, err := strconv.Atoi(x) - return err != nil -} - -func flushAll(dbs ...common.Database) { - for _, db := range dbs { - db.Flush() - db.Close() - } -} From 8e4512a5e7eecb53f41996776d55fe7acc92330c Mon Sep 17 00:00:00 2001 From: Felix Lange Date: Wed, 27 May 2015 14:10:02 +0200 Subject: [PATCH 19/34] p2p/nat: bump timeout in TestAutoDiscRace --- p2p/nat/nat_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/p2p/nat/nat_test.go b/p2p/nat/nat_test.go index 95c50522e2..b62640b4b3 100644 --- a/p2p/nat/nat_test.go +++ b/p2p/nat/nat_test.go @@ -30,7 +30,7 @@ func TestAutoDiscRace(t *testing.T) { } // Check that they all return the correct result within the deadline. - deadline := time.After(550 * time.Millisecond) + deadline := time.After(2 * time.Second) for i := 0; i < cap(results); i++ { select { case <-deadline: From 74706a0f029fe74ea15e366904d827da03091fef Mon Sep 17 00:00:00 2001 From: Felix Lange Date: Wed, 27 May 2015 14:50:31 +0200 Subject: [PATCH 20/34] cmd/geth, cmd/utils: rename utils.Get* -> utils.Make* The renaming should make it clearer that these functions create a new instance for every call. @obscuren suggested this renaming a while ago. 
--- cmd/geth/chaincmd.go | 10 +++++----- cmd/geth/main.go | 8 ++++---- cmd/utils/flags.go | 19 ++++++++++++------- 3 files changed, 21 insertions(+), 16 deletions(-) diff --git a/cmd/geth/chaincmd.go b/cmd/geth/chaincmd.go index e17d1dc9b0..1aed800f66 100644 --- a/cmd/geth/chaincmd.go +++ b/cmd/geth/chaincmd.go @@ -52,7 +52,7 @@ func importChain(ctx *cli.Context) { if len(ctx.Args()) != 1 { utils.Fatalf("This command requires an argument.") } - chain, blockDB, stateDB, extraDB := utils.GetChain(ctx) + chain, blockDB, stateDB, extraDB := utils.MakeChain(ctx) start := time.Now() if err := utils.ImportChain(chain, ctx.Args().First()); err != nil { utils.Fatalf("Import error: %v\n", err) @@ -65,7 +65,7 @@ func exportChain(ctx *cli.Context) { if len(ctx.Args()) != 1 { utils.Fatalf("This command requires an argument.") } - chain, _, _, _ := utils.GetChain(ctx) + chain, _, _, _ := utils.MakeChain(ctx) start := time.Now() if err := utils.ExportChain(chain, ctx.Args().First()); err != nil { utils.Fatalf("Export error: %v\n", err) @@ -95,7 +95,7 @@ func removeDB(ctx *cli.Context) { func upgradeDB(ctx *cli.Context) { glog.Infoln("Upgrading blockchain database") - chain, blockDB, stateDB, extraDB := utils.GetChain(ctx) + chain, blockDB, stateDB, extraDB := utils.MakeChain(ctx) v, _ := blockDB.Get([]byte("BlockchainVersion")) bcVersion := int(common.NewValue(v).Uint()) if bcVersion == 0 { @@ -113,7 +113,7 @@ func upgradeDB(ctx *cli.Context) { os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "state")) // Import the chain file. - chain, blockDB, stateDB, extraDB = utils.GetChain(ctx) + chain, blockDB, stateDB, extraDB = utils.MakeChain(ctx) blockDB.Put([]byte("BlockchainVersion"), common.NewValue(core.BlockChainVersion).Bytes()) err := utils.ImportChain(chain, exportFile) flushAll(blockDB, stateDB, extraDB) @@ -126,7 +126,7 @@ func upgradeDB(ctx *cli.Context) { } func dump(ctx *cli.Context) { - chain, _, stateDB, _ := utils.GetChain(ctx) + chain, _, stateDB, _ := utils.MakeChain(ctx) for _, arg := range ctx.Args() { var block *types.Block if hashish(arg) { diff --git a/cmd/geth/main.go b/cmd/geth/main.go index 9bf0fe6100..eecd7546a8 100644 --- a/cmd/geth/main.go +++ b/cmd/geth/main.go @@ -394,7 +394,7 @@ func startEth(ctx *cli.Context, eth *eth.Ethereum) { } func accountList(ctx *cli.Context) { - am := utils.GetAccountManager(ctx) + am := utils.MakeAccountManager(ctx) accts, err := am.Accounts() if err != nil { utils.Fatalf("Could not list accounts: %v", err) @@ -436,7 +436,7 @@ func getPassPhrase(ctx *cli.Context, desc string, confirmation bool) (passphrase } func accountCreate(ctx *cli.Context) { - am := utils.GetAccountManager(ctx) + am := utils.MakeAccountManager(ctx) passphrase := getPassPhrase(ctx, "Your new account is locked with a password. Please give a password. Do not forget this password.", true) acct, err := am.NewAccount(passphrase) if err != nil { @@ -455,7 +455,7 @@ func importWallet(ctx *cli.Context) { utils.Fatalf("Could not read wallet file: %v", err) } - am := utils.GetAccountManager(ctx) + am := utils.MakeAccountManager(ctx) passphrase := getPassPhrase(ctx, "", false) acct, err := am.ImportPreSaleKey(keyJson, passphrase) @@ -470,7 +470,7 @@ func accountImport(ctx *cli.Context) { if len(keyfile) == 0 { utils.Fatalf("keyfile must be given as argument") } - am := utils.GetAccountManager(ctx) + am := utils.MakeAccountManager(ctx) passphrase := getPassPhrase(ctx, "Your new account is locked with a password. Please give a password. 
Do not forget this password.", true) acct, err := am.Import(keyfile, passphrase) if err != nil { diff --git a/cmd/utils/flags.go b/cmd/utils/flags.go index 176a546f1d..d319055b17 100644 --- a/cmd/utils/flags.go +++ b/cmd/utils/flags.go @@ -256,7 +256,8 @@ var ( } ) -func GetNAT(ctx *cli.Context) nat.Interface { +// MakeNAT creates a port mapper from set command line flags. +func MakeNAT(ctx *cli.Context) nat.Interface { natif, err := nat.Parse(ctx.GlobalString(NATFlag.Name)) if err != nil { Fatalf("Option %s: %v", NATFlag.Name, err) @@ -264,7 +265,8 @@ func GetNAT(ctx *cli.Context) nat.Interface { return natif } -func GetNodeKey(ctx *cli.Context) (key *ecdsa.PrivateKey) { +// MakeNodeKey creates a node key from set command line flags. +func MakeNodeKey(ctx *cli.Context) (key *ecdsa.PrivateKey) { hex, file := ctx.GlobalString(NodeKeyHexFlag.Name), ctx.GlobalString(NodeKeyFileFlag.Name) var err error switch { @@ -282,6 +284,7 @@ func GetNodeKey(ctx *cli.Context) (key *ecdsa.PrivateKey) { return key } +// MakeEthConfig creates ethereum options from set command line flags. func MakeEthConfig(clientID, version string, ctx *cli.Context) *eth.Config { customName := ctx.GlobalString(IdentityFlag.Name) if len(customName) > 0 { @@ -299,15 +302,15 @@ func MakeEthConfig(clientID, version string, ctx *cli.Context) *eth.Config { LogJSON: ctx.GlobalString(LogJSONFlag.Name), Etherbase: ctx.GlobalString(EtherbaseFlag.Name), MinerThreads: ctx.GlobalInt(MinerThreadsFlag.Name), - AccountManager: GetAccountManager(ctx), + AccountManager: MakeAccountManager(ctx), VmDebug: ctx.GlobalBool(VMDebugFlag.Name), MaxPeers: ctx.GlobalInt(MaxPeersFlag.Name), MaxPendingPeers: ctx.GlobalInt(MaxPendingPeersFlag.Name), Port: ctx.GlobalString(ListenPortFlag.Name), - NAT: GetNAT(ctx), + NAT: MakeNAT(ctx), NatSpec: ctx.GlobalBool(NatspecEnabledFlag.Name), Discovery: !ctx.GlobalBool(NoDiscoverFlag.Name), - NodeKey: GetNodeKey(ctx), + NodeKey: MakeNodeKey(ctx), Shh: ctx.GlobalBool(WhisperEnabledFlag.Name), Dial: true, BootNodes: ctx.GlobalString(BootnodesFlag.Name), @@ -325,7 +328,8 @@ func SetupLogger(ctx *cli.Context) { glog.SetLogDir(ctx.GlobalString(LogFileFlag.Name)) } -func GetChain(ctx *cli.Context) (chain *core.ChainManager, blockDB, stateDB, extraDB common.Database) { +// MakeChain creates a chain manager from set command line flags. +func MakeChain(ctx *cli.Context) (chain *core.ChainManager, blockDB, stateDB, extraDB common.Database) { dd := ctx.GlobalString(DataDirFlag.Name) var err error if blockDB, err = ethdb.NewLDBDatabase(filepath.Join(dd, "blockchain")); err != nil { @@ -347,7 +351,8 @@ func GetChain(ctx *cli.Context) (chain *core.ChainManager, blockDB, stateDB, ext return chain, blockDB, stateDB, extraDB } -func GetAccountManager(ctx *cli.Context) *accounts.Manager { +// MakeAccountManager creates an account manager from set command line flags.
+func MakeAccountManager(ctx *cli.Context) *accounts.Manager { dataDir := ctx.GlobalString(DataDirFlag.Name) ks := crypto.NewKeyStorePassphrase(filepath.Join(dataDir, "keystore")) return accounts.NewManager(ks) From 705beb4c25f976f6bce40818bd835e235c2babf4 Mon Sep 17 00:00:00 2001 From: Felix Lange Date: Wed, 27 May 2015 15:48:07 +0200 Subject: [PATCH 21/34] cmd/utils: print errors only once if stdout and stderr are the same file --- cmd/utils/cmd.go | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/cmd/utils/cmd.go b/cmd/utils/cmd.go index 550ac1c514..06c240bd4f 100644 --- a/cmd/utils/cmd.go +++ b/cmd/utils/cmd.go @@ -125,10 +125,17 @@ func initDataDir(Datadir string) { } } -// Fatalf formats a message to standard output and exits the program. +// Fatalf formats a message to standard error and exits the program. +// The message is also printed to standard output if standard error +// is redirected to a different file. func Fatalf(format string, args ...interface{}) { - fmt.Fprintf(os.Stderr, "Fatal: "+format+"\n", args...) - fmt.Fprintf(os.Stdout, "Fatal: "+format+"\n", args...) + w := io.MultiWriter(os.Stdout, os.Stderr) + outf, _ := os.Stdout.Stat() + errf, _ := os.Stderr.Stat() + if outf != nil && errf != nil && os.SameFile(outf, errf) { + w = os.Stderr + } + fmt.Fprintf(w, "Fatal: "+format+"\n", args...) logger.Flush() os.Exit(1) } From 67effb94b6fadac7b207cb5333c91f578326762e Mon Sep 17 00:00:00 2001 From: Felix Lange Date: Wed, 27 May 2015 16:02:08 +0200 Subject: [PATCH 22/34] cmd/geth, cmd/utils: make chain importing interruptible Interrupting import with Ctrl-C could cause database corruption because the signal wasn't handled. utils.ImportChain now checks for a queued interrupt on every batch. --- cmd/geth/chaincmd.go | 11 ++++++----- cmd/utils/cmd.go | 38 +++++++++++++++++++++++++++++++++----- 2 files changed, 39 insertions(+), 10 deletions(-) diff --git a/cmd/geth/chaincmd.go b/cmd/geth/chaincmd.go index 1aed800f66..6245c691ba 100644 --- a/cmd/geth/chaincmd.go +++ b/cmd/geth/chaincmd.go @@ -54,10 +54,11 @@ func importChain(ctx *cli.Context) { } chain, blockDB, stateDB, extraDB := utils.MakeChain(ctx) start := time.Now() - if err := utils.ImportChain(chain, ctx.Args().First()); err != nil { - utils.Fatalf("Import error: %v\n", err) - } + err := utils.ImportChain(chain, ctx.Args().First()) flushAll(blockDB, stateDB, extraDB) + if err != nil { + utils.Fatalf("Import error: %v", err) + } fmt.Printf("Import done in %v", time.Since(start)) } @@ -106,7 +107,7 @@ func upgradeDB(ctx *cli.Context) { filename := fmt.Sprintf("blockchain_%d_%s.chain", bcVersion, time.Now().Format("20060102_150405")) exportFile := filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), filename) if err := utils.ExportChain(chain, exportFile); err != nil { - utils.Fatalf("Unable to export chain for reimport %s\n", err) + utils.Fatalf("Unable to export chain for reimport %s", err) } flushAll(blockDB, stateDB, extraDB) os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "blockchain")) @@ -118,7 +119,7 @@ func upgradeDB(ctx *cli.Context) { err := utils.ImportChain(chain, exportFile) flushAll(blockDB, stateDB, extraDB) if err != nil { - utils.Fatalf("Import error %v (a backup is made in %s, use the import command to import it)\n", err, exportFile) + utils.Fatalf("Import error %v (a backup is made in %s, use the import command to import it)", err, exportFile) } else { os.Remove(exportFile) glog.Infoln("Import finished") diff --git a/cmd/utils/cmd.go b/cmd/utils/cmd.go 
index 06c240bd4f..2e2b627df7 100644 --- a/cmd/utils/cmd.go +++ b/cmd/utils/cmd.go @@ -173,22 +173,47 @@ func FormatTransactionData(data string) []byte { return d } -func ImportChain(chainmgr *core.ChainManager, fn string) error { +func ImportChain(chain *core.ChainManager, fn string) error { + // Watch for Ctrl-C while the import is running. + // If a signal is received, the import will stop at the next batch. + interrupt := make(chan os.Signal, 1) + stop := make(chan struct{}) + signal.Notify(interrupt, os.Interrupt) + defer signal.Stop(interrupt) + defer close(interrupt) + go func() { + if _, ok := <-interrupt; ok { + glog.Info("caught interrupt during import, will stop at next batch") + } + close(stop) + }() + checkInterrupt := func() bool { + select { + case <-stop: + return true + default: + return false + } + } + glog.Infoln("Importing blockchain", fn) - fh, err := os.OpenFile(fn, os.O_RDONLY, os.ModePerm) + fh, err := os.Open(fn) if err != nil { return err } defer fh.Close() - - chainmgr.Reset() stream := rlp.NewStream(fh, 0) + // Remove all existing blocks and start the import. + chain.Reset() batchSize := 2500 blocks := make(types.Blocks, batchSize) n := 0 for { // Load a batch of RLP blocks. + if checkInterrupt() { + return fmt.Errorf("interrupted") + } i := 0 for ; i < batchSize; i++ { var b types.Block @@ -204,7 +229,10 @@ func ImportChain(chainmgr *core.ChainManager, fn string) error { break } // Import the batch. - if _, err := chainmgr.InsertChain(blocks[:i]); err != nil { + if checkInterrupt() { + return fmt.Errorf("interrupted") + } + if _, err := chain.InsertChain(blocks[:i]); err != nil { return fmt.Errorf("invalid block %d: %v", n, err) } } From a8bc2181c94f5d3a9455c4fa526f8722a21ecb04 Mon Sep 17 00:00:00 2001 From: Felix Lange Date: Wed, 27 May 2015 17:35:08 +0200 Subject: [PATCH 23/34] cmd/utils: skip batches with known blocks during import This makes block importing restartable. --- cmd/utils/cmd.go | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/cmd/utils/cmd.go b/cmd/utils/cmd.go index 2e2b627df7..2a5e2ec6ad 100644 --- a/cmd/utils/cmd.go +++ b/cmd/utils/cmd.go @@ -204,12 +204,11 @@ func ImportChain(chain *core.ChainManager, fn string) error { defer fh.Close() stream := rlp.NewStream(fh, 0) - // Remove all existing blocks and start the import. - chain.Reset() + // Run actual the import. batchSize := 2500 blocks := make(types.Blocks, batchSize) n := 0 - for { + for batch := 0; ; batch++ { // Load a batch of RLP blocks. 
if checkInterrupt() { return fmt.Errorf("interrupted") @@ -232,6 +231,11 @@ func ImportChain(chain *core.ChainManager, fn string) error { if checkInterrupt() { return fmt.Errorf("interrupted") } + if hasAllBlocks(chain, blocks[:i]) { + glog.Infof("skipping batch %d, all blocks present [%x / %x]", + batch, blocks[0].Hash().Bytes()[:4], blocks[i-1].Hash().Bytes()[:4]) + continue + } if _, err := chain.InsertChain(blocks[:i]); err != nil { return fmt.Errorf("invalid block %d: %v", n, err) } @@ -239,6 +243,15 @@ func ImportChain(chain *core.ChainManager, fn string) error { return nil } +func hasAllBlocks(chain *core.ChainManager, bs []*types.Block) bool { + for _, b := range bs { + if !chain.HasBlock(b.Hash()) { + return false + } + } + return true +} + func ExportChain(chainmgr *core.ChainManager, fn string) error { glog.Infoln("Exporting blockchain to", fn) fh, err := os.OpenFile(fn, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, os.ModePerm) From e1fe75e3b637758f99ddbcaeb01eafa1a0b6455e Mon Sep 17 00:00:00 2001 From: Felix Lange Date: Thu, 28 May 2015 01:16:57 +0200 Subject: [PATCH 24/34] cmd/utils: use constant for import batch size --- cmd/utils/cmd.go | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/cmd/utils/cmd.go b/cmd/utils/cmd.go index 2a5e2ec6ad..e5413973de 100644 --- a/cmd/utils/cmd.go +++ b/cmd/utils/cmd.go @@ -40,6 +40,10 @@ import ( "github.com/peterh/liner" ) +const ( + importBatchSize = 2500 +) + var interruptCallbacks = []func(os.Signal){} // Register interrupt handlers callbacks @@ -205,8 +209,7 @@ func ImportChain(chain *core.ChainManager, fn string) error { stream := rlp.NewStream(fh, 0) // Run actual the import. - batchSize := 2500 - blocks := make(types.Blocks, batchSize) + blocks := make(types.Blocks, importBatchSize) n := 0 for batch := 0; ; batch++ { // Load a batch of RLP blocks. 
@@ -214,7 +217,7 @@ func ImportChain(chain *core.ChainManager, fn string) error { return fmt.Errorf("interrupted") } i := 0 - for ; i < batchSize; i++ { + for ; i < importBatchSize; i++ { var b types.Block if err := stream.Decode(&b); err == io.EOF { break From e84bbcce3c335b863eb6304ad910047054b68c20 Mon Sep 17 00:00:00 2001 From: Felix Lange Date: Thu, 28 May 2015 01:20:03 +0200 Subject: [PATCH 25/34] cmd/geth: don't flush databases after import --- cmd/geth/chaincmd.go | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/cmd/geth/chaincmd.go b/cmd/geth/chaincmd.go index 6245c691ba..947532f408 100644 --- a/cmd/geth/chaincmd.go +++ b/cmd/geth/chaincmd.go @@ -55,7 +55,7 @@ func importChain(ctx *cli.Context) { chain, blockDB, stateDB, extraDB := utils.MakeChain(ctx) start := time.Now() err := utils.ImportChain(chain, ctx.Args().First()) - flushAll(blockDB, stateDB, extraDB) + closeAll(blockDB, stateDB, extraDB) if err != nil { utils.Fatalf("Import error: %v", err) } @@ -109,7 +109,7 @@ func upgradeDB(ctx *cli.Context) { if err := utils.ExportChain(chain, exportFile); err != nil { utils.Fatalf("Unable to export chain for reimport %s", err) } - flushAll(blockDB, stateDB, extraDB) + closeAll(blockDB, stateDB, extraDB) os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "blockchain")) os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "state")) @@ -117,7 +117,7 @@ func upgradeDB(ctx *cli.Context) { chain, blockDB, stateDB, extraDB = utils.MakeChain(ctx) blockDB.Put([]byte("BlockchainVersion"), common.NewValue(core.BlockChainVersion).Bytes()) err := utils.ImportChain(chain, exportFile) - flushAll(blockDB, stateDB, extraDB) + closeAll(blockDB, stateDB, extraDB) if err != nil { utils.Fatalf("Import error %v (a backup is made in %s, use the import command to import it)", err, exportFile) } else { @@ -152,9 +152,8 @@ func hashish(x string) bool { return err != nil } -func flushAll(dbs ...common.Database) { +func closeAll(dbs ...common.Database) { for _, db := range dbs { - db.Flush() db.Close() } } From 29b0480cfb2f2bd6c350fdce0063312ad691d7b8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A9ter=20Szil=C3=A1gyi?= Date: Thu, 28 May 2015 13:06:10 +0300 Subject: [PATCH 26/34] core, eth/downloader: expose the bad hashes, check in downloader --- core/blocks.go | 7 ++++--- core/chain_manager.go | 2 +- core/manager.go | 6 ------ eth/downloader/downloader.go | 16 ++++++++++++++++ eth/sync.go | 3 +-- 5 files changed, 22 insertions(+), 12 deletions(-) diff --git a/core/blocks.go b/core/blocks.go index d09242a07b..35e170af32 100644 --- a/core/blocks.go +++ b/core/blocks.go @@ -2,7 +2,8 @@ package core import "github.com/ethereum/go-ethereum/common" -var badHashes = []common.Hash{ - common.HexToHash("f269c503aed286caaa0d114d6a5320e70abbc2febe37953207e76a2873f2ba79"), - common.HexToHash("38f5bbbffd74804820ffa4bab0cd540e9de229725afb98c1a7e57936f4a714bc"), +// Set of manually tracked bad hashes (usually hard forks) +var BadHashes = map[common.Hash]bool{ + common.HexToHash("f269c503aed286caaa0d114d6a5320e70abbc2febe37953207e76a2873f2ba79"): true, + common.HexToHash("38f5bbbffd74804820ffa4bab0cd540e9de229725afb98c1a7e57936f4a714bc"): true, } diff --git a/core/chain_manager.go b/core/chain_manager.go index ee73145c10..edf8825f35 100644 --- a/core/chain_manager.go +++ b/core/chain_manager.go @@ -121,7 +121,7 @@ func NewChainManager(blockDb, stateDb common.Database, pow pow.PoW, mux *event.T bc.setLastState() // Check the current state of the block hashes and make 
sure that we do not have any of the bad blocks in our chain - for _, hash := range badHashes { + for hash, _ := range BadHashes { if block := bc.GetBlock(hash); block != nil { glog.V(logger.Error).Infof("Found bad hash. Reorganising chain to state %x\n", block.ParentHash().Bytes()[:4]) block = bc.GetBlock(block.ParentHash()) diff --git a/core/manager.go b/core/manager.go index 695f0e99c9..ba0ecf9d16 100644 --- a/core/manager.go +++ b/core/manager.go @@ -3,9 +3,7 @@ package core import ( "github.com/ethereum/go-ethereum/accounts" "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/eth/downloader" "github.com/ethereum/go-ethereum/event" - "github.com/ethereum/go-ethereum/p2p" ) // TODO move this to types? @@ -14,11 +12,7 @@ type Backend interface { BlockProcessor() *BlockProcessor ChainManager() *ChainManager TxPool() *TxPool - PeerCount() int - IsListening() bool - Peers() []*p2p.Peer BlockDb() common.Database StateDb() common.Database EventMux() *event.TypeMux - Downloader() *downloader.Downloader } diff --git a/eth/downloader/downloader.go b/eth/downloader/downloader.go index 421c336f27..85531ce155 100644 --- a/eth/downloader/downloader.go +++ b/eth/downloader/downloader.go @@ -7,7 +7,10 @@ import ( "sync/atomic" "time" + "gopkg.in/fatih/set.v0" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/event" "github.com/ethereum/go-ethereum/logger" @@ -75,6 +78,7 @@ type Downloader struct { queue *queue // Scheduler for selecting the hashes to download peers *peerSet // Set of active peers from which download can proceed checks map[common.Hash]*crossCheck // Pending cross checks to verify a hash chain + banned *set.SetNonTS // Set of hashes we've received and banned // Callbacks hasBlock hashCheckFn @@ -100,6 +104,7 @@ type Block struct { } func New(mux *event.TypeMux, hasBlock hashCheckFn, getBlock getBlockFn) *Downloader { + // Create the base downloader downloader := &Downloader{ mux: mux, queue: newQueue(), @@ -110,6 +115,11 @@ func New(mux *event.TypeMux, hasBlock hashCheckFn, getBlock getBlockFn) *Downloa hashCh: make(chan hashPack, 1), blockCh: make(chan blockPack, 1), } + // Inject all the known bad hashes + downloader.banned = set.NewNonTS() + for hash, _ := range core.BadHashes { + downloader.banned.Add(hash) + } return downloader } @@ -280,6 +290,12 @@ func (d *Downloader) fetchHashes(p *peer, h common.Hash) error { glog.V(logger.Debug).Infof("Peer (%s) responded with empty hash set\n", active.id) return errEmptyHashSet } + for _, hash := range hashPack.hashes { + if d.banned.Has(hash) { + glog.V(logger.Debug).Infof("Peer (%s) sent a known invalid chain\n", active.id) + return ErrInvalidChain + } + } // Determine if we're done fetching hashes (queue up all pending), and continue if not done done, index := false, 0 for index, head = range hashPack.hashes { diff --git a/eth/sync.go b/eth/sync.go index cf549f852c..76e1376304 100644 --- a/eth/sync.go +++ b/eth/sync.go @@ -70,6 +70,7 @@ func (pm *ProtocolManager) processBlocks() error { // Try to inset the blocks, drop the originating peer if there's an error index, err := pm.chainman.InsertChain(raw) if err != nil { + glog.V(logger.Debug).Infoln("Downloaded block import failed:", err) pm.removePeer(blocks[index].OriginPeer) pm.downloader.Cancel() return err @@ -84,12 +85,10 @@ func (pm *ProtocolManager) processBlocks() error { func (pm *ProtocolManager) synchronise(peer *peer) { // Short circuit if no 
peers are available if peer == nil { - glog.V(logger.Debug).Infoln("Synchronisation canceled: no peers available") return } // Make sure the peer's TD is higher than our own. If not drop. if peer.td.Cmp(pm.chainman.Td()) <= 0 { - glog.V(logger.Debug).Infoln("Synchronisation canceled: peer's total difficulty is too small") return } // FIXME if we have the hash in our chain and the TD of the peer is From 598e454d46b62dda1a978529a52042e66dc2aaad Mon Sep 17 00:00:00 2001 From: obscuren Date: Thu, 28 May 2015 13:24:09 +0200 Subject: [PATCH 27/34] cmd/geth: updated web3 --- jsre/ethereum_js.go | 1692 ++++--------------------------------------- 1 file changed, 146 insertions(+), 1546 deletions(-) diff --git a/jsre/ethereum_js.go b/jsre/ethereum_js.go index 6c0ae33d7b..74f6b2acd6 100644 --- a/jsre/ethereum_js.go +++ b/jsre/ethereum_js.go @@ -1,6 +1,7 @@ package jsre -const Ethereum_JS = `require=(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o. */ -/** +/** * @file coder.js * @author Marek Kotewicz * @date 2015 @@ -72,7 +73,7 @@ SolidityType.prototype.isType = function (name) { * @method formatInput * @param {Object} param - plain object, or an array of objects * @param {Bool} arrayType - true if a param should be encoded as an array - * @return {SolidityParam} encoded param wrapped in SolidityParam object + * @return {SolidityParam} encoded param wrapped in SolidityParam object */ SolidityType.prototype.formatInput = function (param, arrayType) { if (utils.isArray(param) && arrayType) { // TODO: should fail if this two are not the same @@ -82,7 +83,7 @@ SolidityType.prototype.formatInput = function (param, arrayType) { }).reduce(function (acc, current) { return acc.combine(current); }, f.formatInputInt(param.length)).withOffset(32); - } + } return this._inputFormatter(param); }; @@ -96,7 +97,7 @@ SolidityType.prototype.formatInput = function (param, arrayType) { */ SolidityType.prototype.formatOutput = function (param, arrayType) { if (arrayType) { - // let's assume, that we solidity will never return long arrays :P + // let's assume, that we solidity will never return long arrays :P var result = []; var length = new BigNumber(param.dynamicPart().slice(0, 64), 16); for (var i = 0; i < length * 64; i += 64) { @@ -137,7 +138,7 @@ var SolidityCoder = function (types) { * * @method _requireType * @param {String} type - * @returns {SolidityType} + * @returns {SolidityType} * @throws {Error} throws if no matching type is found */ SolidityCoder.prototype._requireType = function (type) { @@ -284,7 +285,7 @@ var coder = new SolidityCoder([ module.exports = coder; -},{"../utils/utils":7,"./formatters":2,"./param":3,"bignumber.js":"bignumber.js"}],2:[function(require,module,exports){ +},{"../utils/utils":6,"./formatters":2,"./param":3,"bignumber.js":"bignumber.js"}],2:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -301,7 +302,7 @@ module.exports = coder; You should have received a copy of the GNU Lesser General Public License along with ethereum.js. If not, see . 
*/ -/** +/** * @file formatters.js * @author Marek Kotewicz * @date 2015 @@ -426,7 +427,7 @@ var formatOutputUInt = function (param) { * @returns {BigNumber} input bytes formatted to real */ var formatOutputReal = function (param) { - return formatOutputInt(param).dividedBy(new BigNumber(2).pow(128)); + return formatOutputInt(param).dividedBy(new BigNumber(2).pow(128)); }; /** @@ -437,7 +438,7 @@ var formatOutputReal = function (param) { * @returns {BigNumber} input bytes formatted to ureal */ var formatOutputUReal = function (param) { - return formatOutputUInt(param).dividedBy(new BigNumber(2).pow(128)); + return formatOutputUInt(param).dividedBy(new BigNumber(2).pow(128)); }; /** @@ -504,7 +505,7 @@ module.exports = { }; -},{"../utils/config":5,"../utils/utils":7,"./param":3,"bignumber.js":"bignumber.js"}],3:[function(require,module,exports){ +},{"../utils/config":5,"../utils/utils":6,"./param":3,"bignumber.js":"bignumber.js"}],3:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -521,7 +522,7 @@ module.exports = { You should have received a copy of the GNU Lesser General Public License along with ethereum.js. If not, see . */ -/** +/** * @file param.js * @author Marek Kotewicz * @date 2015 @@ -540,7 +541,7 @@ var SolidityParam = function (value, offset) { /** * This method should be used to get length of params's dynamic part - * + * * @method dynamicPartLength * @returns {Number} length of dynamic part (in bytes) */ @@ -568,7 +569,7 @@ SolidityParam.prototype.withOffset = function (offset) { * @param {SolidityParam} result of combination */ SolidityParam.prototype.combine = function (param) { - return new SolidityParam(this.value + param.value); + return new SolidityParam(this.value + param.value); }; /** @@ -579,7 +580,7 @@ SolidityParam.prototype.combine = function (param) { * @returns {Boolean} */ SolidityParam.prototype.isDynamic = function () { - return this.value.length > 64; + return this.value.length > 64 || this.offset !== undefined; }; /** @@ -600,8 +601,8 @@ SolidityParam.prototype.offsetAsBytes = function () { */ SolidityParam.prototype.staticPart = function () { if (!this.isDynamic()) { - return this.value; - } + return this.value; + } return this.offsetAsBytes(); }; @@ -633,7 +634,7 @@ SolidityParam.prototype.encode = function () { * @returns {String} */ SolidityParam.encodeList = function (params) { - + // updating offsets var totalOffset = params.length * 32; var offsetParams = params.map(function (param) { @@ -663,7 +664,7 @@ SolidityParam.encodeList = function (params) { */ SolidityParam.decodeParam = function (bytes, index) { index = index || 0; - return new SolidityParam(bytes.substr(index * 64, 64)); + return new SolidityParam(bytes.substr(index * 64, 64)); }; /** @@ -695,7 +696,7 @@ SolidityParam.decodeBytes = function (bytes, index) { var offset = getOffset(bytes, index); // 2 * , cause we also parse length - return new SolidityParam(bytes.substr(offset * 2, 2 * 64)); + return new SolidityParam(bytes.substr(offset * 2, 2 * 64), 0); }; /** @@ -710,13 +711,13 @@ SolidityParam.decodeArray = function (bytes, index) { index = index || 0; var offset = getOffset(bytes, index); var length = parseInt('0x' + bytes.substr(offset * 2, 64)); - return new SolidityParam(bytes.substr(offset * 2, (length + 1) * 64)); + return new SolidityParam(bytes.substr(offset * 2, (length + 1) * 64), 0); }; module.exports = SolidityParam; -},{"../utils/utils":7}],4:[function(require,module,exports){ +},{"../utils/utils":6}],4:[function(require,module,exports){ 'use 
strict'; // go env doesn't have and need XMLHttpRequest @@ -752,13 +753,13 @@ if (typeof XMLHttpRequest === 'undefined') { /** * Utils - * + * * @module utils */ /** * Utility functions - * + * * @class [utils] config * @constructor */ @@ -766,26 +767,26 @@ if (typeof XMLHttpRequest === 'undefined') { /// required to define ETH_BIGNUMBER_ROUNDING_MODE var BigNumber = require('bignumber.js'); -var ETH_UNITS = [ - 'wei', - 'Kwei', - 'Mwei', - 'Gwei', - 'szabo', - 'finney', - 'ether', - 'grand', - 'Mether', - 'Gether', - 'Tether', - 'Pether', - 'Eether', - 'Zether', - 'Yether', - 'Nether', - 'Dether', - 'Vether', - 'Uether' +var ETH_UNITS = [ + 'wei', + 'Kwei', + 'Mwei', + 'Gwei', + 'szabo', + 'finney', + 'ether', + 'grand', + 'Mether', + 'Gether', + 'Tether', + 'Pether', + 'Eether', + 'Zether', + 'Yether', + 'Nether', + 'Dether', + 'Vether', + 'Uether' ]; module.exports = { @@ -816,39 +817,7 @@ module.exports = { You should have received a copy of the GNU Lesser General Public License along with ethereum.js. If not, see . */ -/** - * @file sha3.js - * @author Marek Kotewicz - * @date 2015 - */ - -var sha3 = require('crypto-js/sha3'); - -module.exports = function (str) { - return sha3(str, { - outputLength: 256 - }).toString(); -}; - - -},{"crypto-js/sha3":30}],7:[function(require,module,exports){ -/* - This file is part of ethereum.js. - - ethereum.js is free software: you can redistribute it and/or modify - it under the terms of the GNU Lesser General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - ethereum.js is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public License - along with ethereum.js. If not, see . -*/ -/** +/** * @file utils.js * @author Marek Kotewicz * @date 2015 @@ -856,13 +825,13 @@ module.exports = function (str) { /** * Utils - * + * * @module utils */ /** * Utility functions - * + * * @class [utils] utils * @constructor */ @@ -901,7 +870,7 @@ var padLeft = function (string, chars, sign) { return new Array(chars - string.length + 1).join(sign ? sign : "0") + string; }; -/** +/** * Should be called to get sting from it's hex representation * * @method toAscii @@ -926,9 +895,9 @@ var toAscii = function(hex) { return str; }; - + /** - * Shold be called to get hex representation (prefixed by 0x) of ascii string + * Shold be called to get hex representation (prefixed by 0x) of ascii string * * @method toHexNative * @param {String} string @@ -945,7 +914,7 @@ var toHexNative = function(str) { }; /** - * Shold be called to get hex representation (prefixed by 0x) of ascii string + * Shold be called to get hex representation (prefixed by 0x) of ascii string * * @method fromAscii * @param {String} string @@ -978,13 +947,13 @@ var transformToFullName = function (json) { /** * Should be called to get display name of contract function - * + * * @method extractDisplayName * @param {String} name of function/event * @returns {String} display name for function/event eg. multiply(uint256) -> multiply */ var extractDisplayName = function (name) { - var length = name.indexOf('('); + var length = name.indexOf('('); return length !== -1 ? 
name.substr(0, length) : name; }; @@ -1092,7 +1061,7 @@ var getValueOfUnit = function (unit) { var fromWei = function(number, unit) { var returnValue = toBigNumber(number).dividedBy(getValueOfUnit(unit)); - return isBigNumber(number) ? returnValue : returnValue.toString(10); + return isBigNumber(number) ? returnValue : returnValue.toString(10); }; /** @@ -1118,7 +1087,7 @@ var fromWei = function(number, unit) { var toWei = function(number, unit) { var returnValue = toBigNumber(number).times(getValueOfUnit(unit)); - return isBigNumber(number) ? returnValue : returnValue.toString(10); + return isBigNumber(number) ? returnValue : returnValue.toString(10); }; /** @@ -1137,7 +1106,7 @@ var toBigNumber = function(number) { if (isString(number) && (number.indexOf('0x') === 0 || number.indexOf('-0x') === 0)) { return new BigNumber(number.replace('0x',''), 16); } - + return new BigNumber(number.toString(10), 10); }; @@ -1189,7 +1158,7 @@ var toAddress = function (address) { if (isStrictAddress(address)) { return address; } - + if (/^[0-9a-f]{40}$/.test(address)) { return '0x' + address; } @@ -1203,7 +1172,7 @@ var toAddress = function (address) { * * @method isBigNumber * @param {Object} - * @return {Boolean} + * @return {Boolean} */ var isBigNumber = function (object) { return object instanceof BigNumber || @@ -1212,7 +1181,7 @@ var isBigNumber = function (object) { /** * Returns true if object is string, otherwise false - * + * * @method isString * @param {Object} * @return {Boolean} @@ -1263,12 +1232,12 @@ var isBoolean = function (object) { * @return {Boolean} */ var isArray = function (object) { - return object instanceof Array; + return object instanceof Array; }; /** * Returns true if given string is valid json object - * + * * @method isJson * @param {String} * @return {Boolean} @@ -1308,12 +1277,12 @@ module.exports = { }; -},{"bignumber.js":"bignumber.js"}],8:[function(require,module,exports){ +},{"bignumber.js":"bignumber.js"}],7:[function(require,module,exports){ module.exports={ - "version": "0.4.2" + "version": "0.4.3" } -},{}],9:[function(require,module,exports){ +},{}],8:[function(require,module,exports){ /* This file is part of ethereum.js. 
@@ -1351,9 +1320,17 @@ var utils = require('./utils/utils'); var formatters = require('./web3/formatters'); var RequestManager = require('./web3/requestmanager'); var c = require('./utils/config'); +var Method = require('./web3/method'); var Property = require('./web3/property'); var Batch = require('./web3/batch'); -var sha3 = require('./utils/sha3'); + +var web3Methods = [ + new Method({ + name: 'sha3', + call: 'web3_sha3', + params: 1 + }) +]; var web3Properties = [ new Property({ @@ -1438,7 +1415,6 @@ web3.toBigNumber = utils.toBigNumber; web3.toWei = utils.toWei; web3.fromWei = utils.fromWei; web3.isAddress = utils.isAddress; -web3.sha3 = sha3; web3.createBatch = function () { return new Batch(); }; @@ -1465,6 +1441,7 @@ Object.defineProperty(web3.eth, 'defaultAccount', { }); /// setups all api methods +setupMethods(web3, web3Methods); setupProperties(web3, web3Properties); setupMethods(web3.net, net.methods); setupProperties(web3.net, net.properties); @@ -1476,7 +1453,7 @@ setupMethods(web3.shh, shh.methods); module.exports = web3; -},{"./utils/config":5,"./utils/sha3":6,"./utils/utils":7,"./version.json":8,"./web3/batch":10,"./web3/db":12,"./web3/eth":14,"./web3/filter":16,"./web3/formatters":17,"./web3/net":22,"./web3/property":23,"./web3/requestmanager":25,"./web3/shh":26,"./web3/watches":27}],10:[function(require,module,exports){ +},{"./utils/config":5,"./utils/utils":6,"./version.json":7,"./web3/batch":9,"./web3/db":11,"./web3/eth":13,"./web3/filter":15,"./web3/formatters":16,"./web3/method":20,"./web3/net":21,"./web3/property":22,"./web3/requestmanager":24,"./web3/shh":25,"./web3/watches":26}],9:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -1493,7 +1470,7 @@ module.exports = web3; You should have received a copy of the GNU Lesser General Public License along with ethereum.js. If not, see . */ -/** +/** * @file batch.js * @author Marek Kotewicz * @date 2015 @@ -1533,13 +1510,13 @@ Batch.prototype.execute = function () { requests[index].callback(err, result); } }); - }); + }); }; module.exports = Batch; -},{"./requestmanager":25}],11:[function(require,module,exports){ +},{"./requestmanager":24}],10:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -1556,13 +1533,13 @@ module.exports = Batch; You should have received a copy of the GNU Lesser General Public License along with ethereum.js. If not, see . 
*/ -/** +/** * @file contract.js * @author Marek Kotewicz * @date 2014 */ -var web3 = require('../web3'); +var web3 = require('../web3'); var utils = require('../utils/utils'); var coder = require('../solidity/coder'); var SolidityEvent = require('./event'); @@ -1644,7 +1621,7 @@ var ContractFactory = function (abi) { /** * Should be called to create new contract on a blockchain - * + * * @method new * @param {Any} contract constructor param1 (optional) * @param {Any} contract constructor param2 (optional) @@ -1677,14 +1654,14 @@ ContractFactory.prototype.new = function () { var address = web3.eth.sendTransaction(options); return this.at(address); } - + var self = this; web3.eth.sendTransaction(options, function (err, address) { if (err) { callback(err); } - self.at(address, callback); - }); + self.at(address, callback); + }); }; /** @@ -1698,10 +1675,10 @@ ContractFactory.prototype.new = function () { */ ContractFactory.prototype.at = function (address, callback) { // TODO: address is required - + if (callback) { callback(null, new Contract(this.abi, address)); - } + } return new Contract(this.abi, address); }; @@ -1721,7 +1698,7 @@ var Contract = function (abi, address) { module.exports = contract; -},{"../solidity/coder":1,"../utils/utils":7,"../web3":9,"./event":15,"./function":18}],12:[function(require,module,exports){ +},{"../solidity/coder":1,"../utils/utils":6,"../web3":8,"./event":14,"./function":17}],11:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -1779,7 +1756,7 @@ module.exports = { methods: methods }; -},{"./method":21}],13:[function(require,module,exports){ +},{"./method":20}],12:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -1796,7 +1773,7 @@ module.exports = { You should have received a copy of the GNU Lesser General Public License along with ethereum.js. If not, see . */ -/** +/** * @file errors.js * @author Marek Kotewicz * @date 2015 @@ -1819,7 +1796,7 @@ module.exports = { }; -},{}],14:[function(require,module,exports){ +},{}],13:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -2096,7 +2073,7 @@ module.exports = { }; -},{"../utils/utils":7,"./formatters":17,"./method":21,"./property":23}],15:[function(require,module,exports){ +},{"../utils/utils":6,"./formatters":16,"./method":20,"./property":22}],14:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -2113,7 +2090,7 @@ module.exports = { You should have received a copy of the GNU Lesser General Public License along with ethereum.js. If not, see . 
*/ -/** +/** * @file event.js * @author Marek Kotewicz * @date 2014 @@ -2123,7 +2100,6 @@ var utils = require('../utils/utils'); var coder = require('../solidity/coder'); var web3 = require('../web3'); var formatters = require('./formatters'); -var sha3 = require('../utils/sha3'); /** * This prototype should be used to create event filters @@ -2177,12 +2153,12 @@ SolidityEvent.prototype.typeName = function () { * @return {String} event signature */ SolidityEvent.prototype.signature = function () { - return sha3(this._name); + return web3.sha3(web3.fromAscii(this._name)).slice(2); }; /** * Should be used to encode indexed params and options to one final object - * + * * @method encode * @param {Object} indexed * @param {Object} options @@ -2213,7 +2189,7 @@ SolidityEvent.prototype.encode = function (indexed, options) { if (value === undefined || value === null) { return null; } - + if (utils.isArray(value)) { return value.map(function (v) { return '0x' + coder.encodeParam(i.type, v); @@ -2235,17 +2211,17 @@ SolidityEvent.prototype.encode = function (indexed, options) { * @return {Object} result object with decoded indexed && not indexed params */ SolidityEvent.prototype.decode = function (data) { - + data.data = data.data || ''; data.topics = data.topics || []; var argTopics = this._anonymous ? data.topics : data.topics.slice(1); var indexedData = argTopics.map(function (topics) { return topics.slice(2); }).join(""); - var indexedParams = coder.decodeParams(this.types(true), indexedData); + var indexedParams = coder.decodeParams(this.types(true), indexedData); var notIndexedData = data.data.slice(2); var notIndexedParams = coder.decodeParams(this.types(false), notIndexedData); - + var result = formatters.outputLogFormatter(data); result.event = this.displayName(); result.address = data.address; @@ -2293,7 +2269,7 @@ SolidityEvent.prototype.attachToContract = function (contract) { module.exports = SolidityEvent; -},{"../solidity/coder":1,"../utils/sha3":6,"../utils/utils":7,"../web3":9,"./formatters":17}],16:[function(require,module,exports){ +},{"../solidity/coder":1,"../utils/utils":6,"../web3":8,"./formatters":16}],15:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -2350,7 +2326,7 @@ var getOptions = function (options) { if (utils.isString(options)) { return options; - } + } options = options || {}; @@ -2366,8 +2342,8 @@ var getOptions = function (options) { to: options.to, address: options.address, fromBlock: formatters.inputBlockNumberFormatter(options.fromBlock), - toBlock: formatters.inputBlockNumberFormatter(options.toBlock) - }; + toBlock: formatters.inputBlockNumberFormatter(options.toBlock) + }; }; var Filter = function (options, methods, formatter) { @@ -2450,7 +2426,7 @@ Filter.prototype.get = function (callback) { module.exports = Filter; -},{"../utils/utils":7,"./formatters":17,"./requestmanager":25}],17:[function(require,module,exports){ +},{"../utils/utils":6,"./formatters":16,"./requestmanager":24}],16:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -2467,7 +2443,7 @@ module.exports = Filter; You should have received a copy of the GNU Lesser General Public License along with ethereum.js. If not, see . 
*/ -/** +/** * @file formatters.js * @author Marek Kotewicz * @author Fabian Vogelsteller @@ -2531,12 +2507,12 @@ var inputTransactionFormatter = function (options){ options[key] = utils.fromDecimal(options[key]); }); - return options; + return options; }; /** * Formats the output of a transaction to its proper values - * + * * @method outputTransactionFormatter * @param {Object} transaction * @returns {Object} transaction @@ -2555,7 +2531,7 @@ var outputTransactionFormatter = function (tx){ * Formats the output of a block to its proper values * * @method outputBlockFormatter - * @param {Object} block object + * @param {Object} block object * @returns {Object} block object */ var outputBlockFormatter = function(block) { @@ -2582,7 +2558,7 @@ var outputBlockFormatter = function(block) { /** * Formats the output of a log - * + * * @method outputLogFormatter * @param {Object} log object * @returns {Object} log @@ -2623,7 +2599,7 @@ var inputPostFormatter = function(post) { return utils.fromAscii(topic); }); - return post; + return post; }; /** @@ -2670,7 +2646,7 @@ module.exports = { }; -},{"../utils/config":5,"../utils/utils":7}],18:[function(require,module,exports){ +},{"../utils/config":5,"../utils/utils":6}],17:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -2696,7 +2672,6 @@ module.exports = { var web3 = require('../web3'); var coder = require('../solidity/coder'); var utils = require('../utils/utils'); -var sha3 = require('../utils/sha3'); /** * This prototype should be used to call/sendTransaction to solidity functions @@ -2743,7 +2718,7 @@ SolidityFunction.prototype.toPayload = function (args) { * @return {String} function signature */ SolidityFunction.prototype.signature = function () { - return sha3(this._name).slice(0, 8); + return web3.sha3(web3.fromAscii(this._name)).slice(2, 10); }; @@ -2775,8 +2750,8 @@ SolidityFunction.prototype.call = function () { if (!callback) { var output = web3.eth.call(payload); return this.unpackOutput(output); - } - + } + var self = this; web3.eth.call(payload, function (error, output) { callback(error, self.unpackOutput(output)); @@ -2833,10 +2808,10 @@ SolidityFunction.prototype.request = function () { var callback = this.extractCallback(args); var payload = this.toPayload(args); var format = this.unpackOutput.bind(this); - + return { callback: callback, - payload: payload, + payload: payload, format: format }; }; @@ -2879,7 +2854,7 @@ SolidityFunction.prototype.attachToContract = function (contract) { module.exports = SolidityFunction; -},{"../solidity/coder":1,"../utils/sha3":6,"../utils/utils":7,"../web3":9}],19:[function(require,module,exports){ +},{"../solidity/coder":1,"../utils/utils":6,"../web3":8}],18:[function(require,module,exports){ /* This file is part of ethereum.js. 
@@ -2917,7 +2892,7 @@ HttpProvider.prototype.send = function (payload) { var request = new XMLHttpRequest(); request.open('POST', this.host, false); - + try { request.send(JSON.stringify(payload)); } catch(error) { @@ -2936,7 +2911,7 @@ HttpProvider.prototype.send = function (payload) { try { result = JSON.parse(result); } catch(e) { - throw errors.InvalidResponse(result); + throw errors.InvalidResponse(result); } return result; @@ -2952,7 +2927,7 @@ HttpProvider.prototype.sendAsync = function (payload, callback) { try { result = JSON.parse(result); } catch(e) { - error = errors.InvalidResponse(result); + error = errors.InvalidResponse(result); } callback(error, result); @@ -2971,7 +2946,7 @@ HttpProvider.prototype.sendAsync = function (payload, callback) { module.exports = HttpProvider; -},{"./errors":13,"xmlhttprequest":4}],20:[function(require,module,exports){ +},{"./errors":12,"xmlhttprequest":4}],19:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -3064,7 +3039,7 @@ Jsonrpc.prototype.toBatchPayload = function (messages) { module.exports = Jsonrpc; -},{}],21:[function(require,module,exports){ +},{}],20:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -3125,7 +3100,7 @@ Method.prototype.extractCallback = function (args) { /** * Should be called to check if the number of arguments is correct - * + * * @method validateArgs * @param {Array} arguments * @throws {Error} if it is not @@ -3138,7 +3113,7 @@ Method.prototype.validateArgs = function (args) { /** * Should be called to format input args of method - * + * * @method formatInput * @param {Array} * @return {Array} @@ -3166,7 +3141,7 @@ Method.prototype.formatOutput = function (result) { /** * Should attach function to method - * + * * @method attachToObject * @param {Object} * @param {Function} @@ -3180,7 +3155,7 @@ Method.prototype.attachToObject = function (obj) { obj[name[0]] = obj[name[0]] || {}; obj[name[0]][name[1]] = func; } else { - obj[name[0]] = func; + obj[name[0]] = func; } }; @@ -3238,7 +3213,7 @@ Method.prototype.send = function () { module.exports = Method; -},{"../utils/utils":7,"./errors":13,"./requestmanager":25}],22:[function(require,module,exports){ +},{"../utils/utils":6,"./errors":12,"./requestmanager":24}],21:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -3288,7 +3263,7 @@ module.exports = { }; -},{"../utils/utils":7,"./property":23}],23:[function(require,module,exports){ +},{"../utils/utils":6,"./property":22}],22:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -3324,7 +3299,7 @@ var Property = function (options) { /** * Should be called to format input args of method - * + * * @method formatInput * @param {Array} * @return {Array} @@ -3346,7 +3321,7 @@ Property.prototype.formatOutput = function (result) { /** * Should attach function to method - * + * * @method attachToObject * @param {Object} * @param {Function} @@ -3363,7 +3338,7 @@ Property.prototype.attachToObject = function (obj) { obj = obj[names[0]]; name = names[1]; } - + Object.defineProperty(obj, name, proto); var toAsyncName = function (prefix, name) { @@ -3406,7 +3381,7 @@ Property.prototype.getAsync = function (callback) { module.exports = Property; -},{"./requestmanager":25}],24:[function(require,module,exports){ +},{"./requestmanager":24}],23:[function(require,module,exports){ /* This file is part of ethereum.js. 
@@ -3441,7 +3416,7 @@ QtSyncProvider.prototype.send = function (payload) { module.exports = QtSyncProvider; -},{}],25:[function(require,module,exports){ +},{}],24:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -3458,7 +3433,7 @@ module.exports = QtSyncProvider; You should have received a copy of the GNU Lesser General Public License along with ethereum.js. If not, see . */ -/** +/** * @file requestmanager.js * @author Jeffrey Wilcke * @author Marek Kotewicz @@ -3540,7 +3515,7 @@ RequestManager.prototype.sendAsync = function (data, callback) { if (err) { return callback(err); } - + if (!Jsonrpc.getInstance().isValidResponse(result)) { return callback(errors.InvalidResponse(result)); } @@ -3573,7 +3548,7 @@ RequestManager.prototype.sendBatch = function (data, callback) { } callback(err, results); - }); + }); }; /** @@ -3626,7 +3601,7 @@ RequestManager.prototype.stopPolling = function (pollId) { */ RequestManager.prototype.reset = function () { this.polls.forEach(function (poll) { - poll.uninstall(poll.id); + poll.uninstall(poll.id); }); this.polls = []; @@ -3664,7 +3639,7 @@ RequestManager.prototype.poll = function () { if (error) { return; } - + if (!utils.isArray(results)) { throw errors.InvalidResponse(results); } @@ -3689,7 +3664,7 @@ RequestManager.prototype.poll = function () { module.exports = RequestManager; -},{"../utils/config":5,"../utils/utils":7,"./errors":13,"./jsonrpc":20}],26:[function(require,module,exports){ +},{"../utils/config":5,"../utils/utils":6,"./errors":12,"./jsonrpc":19}],25:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -3716,8 +3691,8 @@ var Method = require('./method'); var formatters = require('./formatters'); var post = new Method({ - name: 'post', - call: 'shh_post', + name: 'post', + call: 'shh_post', params: 1, inputFormatter: [formatters.inputPostFormatter] }); @@ -3759,7 +3734,7 @@ module.exports = { }; -},{"./formatters":17,"./method":21}],27:[function(require,module,exports){ +},{"./formatters":16,"./method":20}],26:[function(require,module,exports){ /* This file is part of ethereum.js. @@ -3875,1384 +3850,9 @@ module.exports = { }; -},{"./method":21}],28:[function(require,module,exports){ - -},{}],29:[function(require,module,exports){ -;(function (root, factory) { - if (typeof exports === "object") { - // CommonJS - module.exports = exports = factory(); - } - else if (typeof define === "function" && define.amd) { - // AMD - define([], factory); - } - else { - // Global (browser) - root.CryptoJS = factory(); - } -}(this, function () { - - /** - * CryptoJS core components. - */ - var CryptoJS = CryptoJS || (function (Math, undefined) { - /** - * CryptoJS namespace. - */ - var C = {}; - - /** - * Library namespace. - */ - var C_lib = C.lib = {}; - - /** - * Base object for prototypal inheritance. - */ - var Base = C_lib.Base = (function () { - function F() {} - - return { - /** - * Creates a new object that inherits from this object. - * - * @param {Object} overrides Properties to copy into the new object. - * - * @return {Object} The new object. 
- * - * @static - * - * @example - * - * var MyType = CryptoJS.lib.Base.extend({ - * field: 'value', - * - * method: function () { - * } - * }); - */ - extend: function (overrides) { - // Spawn - F.prototype = this; - var subtype = new F(); - - // Augment - if (overrides) { - subtype.mixIn(overrides); - } - - // Create default initializer - if (!subtype.hasOwnProperty('init')) { - subtype.init = function () { - subtype.$super.init.apply(this, arguments); - }; - } - - // Initializer's prototype is the subtype object - subtype.init.prototype = subtype; - - // Reference supertype - subtype.$super = this; - - return subtype; - }, - - /** - * Extends this object and runs the init method. - * Arguments to create() will be passed to init(). - * - * @return {Object} The new object. - * - * @static - * - * @example - * - * var instance = MyType.create(); - */ - create: function () { - var instance = this.extend(); - instance.init.apply(instance, arguments); - - return instance; - }, - - /** - * Initializes a newly created object. - * Override this method to add some logic when your objects are created. - * - * @example - * - * var MyType = CryptoJS.lib.Base.extend({ - * init: function () { - * // ... - * } - * }); - */ - init: function () { - }, - - /** - * Copies properties into this object. - * - * @param {Object} properties The properties to mix in. - * - * @example - * - * MyType.mixIn({ - * field: 'value' - * }); - */ - mixIn: function (properties) { - for (var propertyName in properties) { - if (properties.hasOwnProperty(propertyName)) { - this[propertyName] = properties[propertyName]; - } - } - - // IE won't copy toString using the loop above - if (properties.hasOwnProperty('toString')) { - this.toString = properties.toString; - } - }, - - /** - * Creates a copy of this object. - * - * @return {Object} The clone. - * - * @example - * - * var clone = instance.clone(); - */ - clone: function () { - return this.init.prototype.extend(this); - } - }; - }()); - - /** - * An array of 32-bit words. - * - * @property {Array} words The array of 32-bit words. - * @property {number} sigBytes The number of significant bytes in this word array. - */ - var WordArray = C_lib.WordArray = Base.extend({ - /** - * Initializes a newly created word array. - * - * @param {Array} words (Optional) An array of 32-bit words. - * @param {number} sigBytes (Optional) The number of significant bytes in the words. - * - * @example - * - * var wordArray = CryptoJS.lib.WordArray.create(); - * var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607]); - * var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607], 6); - */ - init: function (words, sigBytes) { - words = this.words = words || []; - - if (sigBytes != undefined) { - this.sigBytes = sigBytes; - } else { - this.sigBytes = words.length * 4; - } - }, - - /** - * Converts this word array to a string. - * - * @param {Encoder} encoder (Optional) The encoding strategy to use. Default: CryptoJS.enc.Hex - * - * @return {string} The stringified word array. - * - * @example - * - * var string = wordArray + ''; - * var string = wordArray.toString(); - * var string = wordArray.toString(CryptoJS.enc.Utf8); - */ - toString: function (encoder) { - return (encoder || Hex).stringify(this); - }, - - /** - * Concatenates a word array to this word array. - * - * @param {WordArray} wordArray The word array to append. - * - * @return {WordArray} This word array. 
- * - * @example - * - * wordArray1.concat(wordArray2); - */ - concat: function (wordArray) { - // Shortcuts - var thisWords = this.words; - var thatWords = wordArray.words; - var thisSigBytes = this.sigBytes; - var thatSigBytes = wordArray.sigBytes; - - // Clamp excess bits - this.clamp(); - - // Concat - if (thisSigBytes % 4) { - // Copy one byte at a time - for (var i = 0; i < thatSigBytes; i++) { - var thatByte = (thatWords[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff; - thisWords[(thisSigBytes + i) >>> 2] |= thatByte << (24 - ((thisSigBytes + i) % 4) * 8); - } - } else if (thatWords.length > 0xffff) { - // Copy one word at a time - for (var i = 0; i < thatSigBytes; i += 4) { - thisWords[(thisSigBytes + i) >>> 2] = thatWords[i >>> 2]; - } - } else { - // Copy all words at once - thisWords.push.apply(thisWords, thatWords); - } - this.sigBytes += thatSigBytes; - - // Chainable - return this; - }, - - /** - * Removes insignificant bits. - * - * @example - * - * wordArray.clamp(); - */ - clamp: function () { - // Shortcuts - var words = this.words; - var sigBytes = this.sigBytes; - - // Clamp - words[sigBytes >>> 2] &= 0xffffffff << (32 - (sigBytes % 4) * 8); - words.length = Math.ceil(sigBytes / 4); - }, - - /** - * Creates a copy of this word array. - * - * @return {WordArray} The clone. - * - * @example - * - * var clone = wordArray.clone(); - */ - clone: function () { - var clone = Base.clone.call(this); - clone.words = this.words.slice(0); - - return clone; - }, - - /** - * Creates a word array filled with random bytes. - * - * @param {number} nBytes The number of random bytes to generate. - * - * @return {WordArray} The random word array. - * - * @static - * - * @example - * - * var wordArray = CryptoJS.lib.WordArray.random(16); - */ - random: function (nBytes) { - var words = []; - - var r = (function (m_w) { - var m_w = m_w; - var m_z = 0x3ade68b1; - var mask = 0xffffffff; - - return function () { - m_z = (0x9069 * (m_z & 0xFFFF) + (m_z >> 0x10)) & mask; - m_w = (0x4650 * (m_w & 0xFFFF) + (m_w >> 0x10)) & mask; - var result = ((m_z << 0x10) + m_w) & mask; - result /= 0x100000000; - result += 0.5; - return result * (Math.random() > .5 ? 1 : -1); - } - }); - - for (var i = 0, rcache; i < nBytes; i += 4) { - var _r = r((rcache || Math.random()) * 0x100000000); - - rcache = _r() * 0x3ade67b7; - words.push((_r() * 0x100000000) | 0); - } - - return new WordArray.init(words, nBytes); - } - }); - - /** - * Encoder namespace. - */ - var C_enc = C.enc = {}; - - /** - * Hex encoding strategy. - */ - var Hex = C_enc.Hex = { - /** - * Converts a word array to a hex string. - * - * @param {WordArray} wordArray The word array. - * - * @return {string} The hex string. - * - * @static - * - * @example - * - * var hexString = CryptoJS.enc.Hex.stringify(wordArray); - */ - stringify: function (wordArray) { - // Shortcuts - var words = wordArray.words; - var sigBytes = wordArray.sigBytes; - - // Convert - var hexChars = []; - for (var i = 0; i < sigBytes; i++) { - var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff; - hexChars.push((bite >>> 4).toString(16)); - hexChars.push((bite & 0x0f).toString(16)); - } - - return hexChars.join(''); - }, - - /** - * Converts a hex string to a word array. - * - * @param {string} hexStr The hex string. - * - * @return {WordArray} The word array. 
- * - * @static - * - * @example - * - * var wordArray = CryptoJS.enc.Hex.parse(hexString); - */ - parse: function (hexStr) { - // Shortcut - var hexStrLength = hexStr.length; - - // Convert - var words = []; - for (var i = 0; i < hexStrLength; i += 2) { - words[i >>> 3] |= parseInt(hexStr.substr(i, 2), 16) << (24 - (i % 8) * 4); - } - - return new WordArray.init(words, hexStrLength / 2); - } - }; - - /** - * Latin1 encoding strategy. - */ - var Latin1 = C_enc.Latin1 = { - /** - * Converts a word array to a Latin1 string. - * - * @param {WordArray} wordArray The word array. - * - * @return {string} The Latin1 string. - * - * @static - * - * @example - * - * var latin1String = CryptoJS.enc.Latin1.stringify(wordArray); - */ - stringify: function (wordArray) { - // Shortcuts - var words = wordArray.words; - var sigBytes = wordArray.sigBytes; - - // Convert - var latin1Chars = []; - for (var i = 0; i < sigBytes; i++) { - var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff; - latin1Chars.push(String.fromCharCode(bite)); - } - - return latin1Chars.join(''); - }, - - /** - * Converts a Latin1 string to a word array. - * - * @param {string} latin1Str The Latin1 string. - * - * @return {WordArray} The word array. - * - * @static - * - * @example - * - * var wordArray = CryptoJS.enc.Latin1.parse(latin1String); - */ - parse: function (latin1Str) { - // Shortcut - var latin1StrLength = latin1Str.length; - - // Convert - var words = []; - for (var i = 0; i < latin1StrLength; i++) { - words[i >>> 2] |= (latin1Str.charCodeAt(i) & 0xff) << (24 - (i % 4) * 8); - } - - return new WordArray.init(words, latin1StrLength); - } - }; - - /** - * UTF-8 encoding strategy. - */ - var Utf8 = C_enc.Utf8 = { - /** - * Converts a word array to a UTF-8 string. - * - * @param {WordArray} wordArray The word array. - * - * @return {string} The UTF-8 string. - * - * @static - * - * @example - * - * var utf8String = CryptoJS.enc.Utf8.stringify(wordArray); - */ - stringify: function (wordArray) { - try { - return decodeURIComponent(escape(Latin1.stringify(wordArray))); - } catch (e) { - throw new Error('Malformed UTF-8 data'); - } - }, - - /** - * Converts a UTF-8 string to a word array. - * - * @param {string} utf8Str The UTF-8 string. - * - * @return {WordArray} The word array. - * - * @static - * - * @example - * - * var wordArray = CryptoJS.enc.Utf8.parse(utf8String); - */ - parse: function (utf8Str) { - return Latin1.parse(unescape(encodeURIComponent(utf8Str))); - } - }; - - /** - * Abstract buffered block algorithm template. - * - * The property blockSize must be implemented in a concrete subtype. - * - * @property {number} _minBufferSize The number of blocks that should be kept unprocessed in the buffer. Default: 0 - */ - var BufferedBlockAlgorithm = C_lib.BufferedBlockAlgorithm = Base.extend({ - /** - * Resets this block algorithm's data buffer to its initial state. - * - * @example - * - * bufferedBlockAlgorithm.reset(); - */ - reset: function () { - // Initial values - this._data = new WordArray.init(); - this._nDataBytes = 0; - }, - - /** - * Adds new data to this block algorithm's buffer. - * - * @param {WordArray|string} data The data to append. Strings are converted to a WordArray using UTF-8. 
- * - * @example - * - * bufferedBlockAlgorithm._append('data'); - * bufferedBlockAlgorithm._append(wordArray); - */ - _append: function (data) { - // Convert string to WordArray, else assume WordArray already - if (typeof data == 'string') { - data = Utf8.parse(data); - } - - // Append - this._data.concat(data); - this._nDataBytes += data.sigBytes; - }, - - /** - * Processes available data blocks. - * - * This method invokes _doProcessBlock(offset), which must be implemented by a concrete subtype. - * - * @param {boolean} doFlush Whether all blocks and partial blocks should be processed. - * - * @return {WordArray} The processed data. - * - * @example - * - * var processedData = bufferedBlockAlgorithm._process(); - * var processedData = bufferedBlockAlgorithm._process(!!'flush'); - */ - _process: function (doFlush) { - // Shortcuts - var data = this._data; - var dataWords = data.words; - var dataSigBytes = data.sigBytes; - var blockSize = this.blockSize; - var blockSizeBytes = blockSize * 4; - - // Count blocks ready - var nBlocksReady = dataSigBytes / blockSizeBytes; - if (doFlush) { - // Round up to include partial blocks - nBlocksReady = Math.ceil(nBlocksReady); - } else { - // Round down to include only full blocks, - // less the number of blocks that must remain in the buffer - nBlocksReady = Math.max((nBlocksReady | 0) - this._minBufferSize, 0); - } - - // Count words ready - var nWordsReady = nBlocksReady * blockSize; - - // Count bytes ready - var nBytesReady = Math.min(nWordsReady * 4, dataSigBytes); - - // Process blocks - if (nWordsReady) { - for (var offset = 0; offset < nWordsReady; offset += blockSize) { - // Perform concrete-algorithm logic - this._doProcessBlock(dataWords, offset); - } - - // Remove processed words - var processedWords = dataWords.splice(0, nWordsReady); - data.sigBytes -= nBytesReady; - } - - // Return processed words - return new WordArray.init(processedWords, nBytesReady); - }, - - /** - * Creates a copy of this object. - * - * @return {Object} The clone. - * - * @example - * - * var clone = bufferedBlockAlgorithm.clone(); - */ - clone: function () { - var clone = Base.clone.call(this); - clone._data = this._data.clone(); - - return clone; - }, - - _minBufferSize: 0 - }); - - /** - * Abstract hasher template. - * - * @property {number} blockSize The number of 32-bit words this hasher operates on. Default: 16 (512 bits) - */ - var Hasher = C_lib.Hasher = BufferedBlockAlgorithm.extend({ - /** - * Configuration options. - */ - cfg: Base.extend(), - - /** - * Initializes a newly created hasher. - * - * @param {Object} cfg (Optional) The configuration options to use for this hash computation. - * - * @example - * - * var hasher = CryptoJS.algo.SHA256.create(); - */ - init: function (cfg) { - // Apply config defaults - this.cfg = this.cfg.extend(cfg); - - // Set initial values - this.reset(); - }, - - /** - * Resets this hasher to its initial state. - * - * @example - * - * hasher.reset(); - */ - reset: function () { - // Reset data buffer - BufferedBlockAlgorithm.reset.call(this); - - // Perform concrete-hasher logic - this._doReset(); - }, - - /** - * Updates this hasher with a message. - * - * @param {WordArray|string} messageUpdate The message to append. - * - * @return {Hasher} This hasher. 
- * - * @example - * - * hasher.update('message'); - * hasher.update(wordArray); - */ - update: function (messageUpdate) { - // Append - this._append(messageUpdate); - - // Update the hash - this._process(); - - // Chainable - return this; - }, - - /** - * Finalizes the hash computation. - * Note that the finalize operation is effectively a destructive, read-once operation. - * - * @param {WordArray|string} messageUpdate (Optional) A final message update. - * - * @return {WordArray} The hash. - * - * @example - * - * var hash = hasher.finalize(); - * var hash = hasher.finalize('message'); - * var hash = hasher.finalize(wordArray); - */ - finalize: function (messageUpdate) { - // Final message update - if (messageUpdate) { - this._append(messageUpdate); - } - - // Perform concrete-hasher logic - var hash = this._doFinalize(); - - return hash; - }, - - blockSize: 512/32, - - /** - * Creates a shortcut function to a hasher's object interface. - * - * @param {Hasher} hasher The hasher to create a helper for. - * - * @return {Function} The shortcut function. - * - * @static - * - * @example - * - * var SHA256 = CryptoJS.lib.Hasher._createHelper(CryptoJS.algo.SHA256); - */ - _createHelper: function (hasher) { - return function (message, cfg) { - return new hasher.init(cfg).finalize(message); - }; - }, - - /** - * Creates a shortcut function to the HMAC's object interface. - * - * @param {Hasher} hasher The hasher to use in this HMAC helper. - * - * @return {Function} The shortcut function. - * - * @static - * - * @example - * - * var HmacSHA256 = CryptoJS.lib.Hasher._createHmacHelper(CryptoJS.algo.SHA256); - */ - _createHmacHelper: function (hasher) { - return function (message, key) { - return new C_algo.HMAC.init(hasher, key).finalize(message); - }; - } - }); - - /** - * Algorithm namespace. 
- */ - var C_algo = C.algo = {}; - - return C; - }(Math)); - - - return CryptoJS; - -})); -},{}],30:[function(require,module,exports){ -;(function (root, factory, undef) { - if (typeof exports === "object") { - // CommonJS - module.exports = exports = factory(require("./core"), require("./x64-core")); - } - else if (typeof define === "function" && define.amd) { - // AMD - define(["./core", "./x64-core"], factory); - } - else { - // Global (browser) - factory(root.CryptoJS); - } -}(this, function (CryptoJS) { - - (function (Math) { - // Shortcuts - var C = CryptoJS; - var C_lib = C.lib; - var WordArray = C_lib.WordArray; - var Hasher = C_lib.Hasher; - var C_x64 = C.x64; - var X64Word = C_x64.Word; - var C_algo = C.algo; - - // Constants tables - var RHO_OFFSETS = []; - var PI_INDEXES = []; - var ROUND_CONSTANTS = []; - - // Compute Constants - (function () { - // Compute rho offset constants - var x = 1, y = 0; - for (var t = 0; t < 24; t++) { - RHO_OFFSETS[x + 5 * y] = ((t + 1) * (t + 2) / 2) % 64; - - var newX = y % 5; - var newY = (2 * x + 3 * y) % 5; - x = newX; - y = newY; - } - - // Compute pi index constants - for (var x = 0; x < 5; x++) { - for (var y = 0; y < 5; y++) { - PI_INDEXES[x + 5 * y] = y + ((2 * x + 3 * y) % 5) * 5; - } - } - - // Compute round constants - var LFSR = 0x01; - for (var i = 0; i < 24; i++) { - var roundConstantMsw = 0; - var roundConstantLsw = 0; - - for (var j = 0; j < 7; j++) { - if (LFSR & 0x01) { - var bitPosition = (1 << j) - 1; - if (bitPosition < 32) { - roundConstantLsw ^= 1 << bitPosition; - } else /* if (bitPosition >= 32) */ { - roundConstantMsw ^= 1 << (bitPosition - 32); - } - } - - // Compute next LFSR - if (LFSR & 0x80) { - // Primitive polynomial over GF(2): x^8 + x^6 + x^5 + x^4 + 1 - LFSR = (LFSR << 1) ^ 0x71; - } else { - LFSR <<= 1; - } - } - - ROUND_CONSTANTS[i] = X64Word.create(roundConstantMsw, roundConstantLsw); - } - }()); - - // Reusable objects for temporary values - var T = []; - (function () { - for (var i = 0; i < 25; i++) { - T[i] = X64Word.create(); - } - }()); - - /** - * SHA-3 hash algorithm. - */ - var SHA3 = C_algo.SHA3 = Hasher.extend({ - /** - * Configuration options. - * - * @property {number} outputLength - * The desired number of bits in the output hash. - * Only values permitted are: 224, 256, 384, 512. 
- * Default: 512 - */ - cfg: Hasher.cfg.extend({ - outputLength: 512 - }), - - _doReset: function () { - var state = this._state = [] - for (var i = 0; i < 25; i++) { - state[i] = new X64Word.init(); - } - - this.blockSize = (1600 - 2 * this.cfg.outputLength) / 32; - }, - - _doProcessBlock: function (M, offset) { - // Shortcuts - var state = this._state; - var nBlockSizeLanes = this.blockSize / 2; - - // Absorb - for (var i = 0; i < nBlockSizeLanes; i++) { - // Shortcuts - var M2i = M[offset + 2 * i]; - var M2i1 = M[offset + 2 * i + 1]; - - // Swap endian - M2i = ( - (((M2i << 8) | (M2i >>> 24)) & 0x00ff00ff) | - (((M2i << 24) | (M2i >>> 8)) & 0xff00ff00) - ); - M2i1 = ( - (((M2i1 << 8) | (M2i1 >>> 24)) & 0x00ff00ff) | - (((M2i1 << 24) | (M2i1 >>> 8)) & 0xff00ff00) - ); - - // Absorb message into state - var lane = state[i]; - lane.high ^= M2i1; - lane.low ^= M2i; - } - - // Rounds - for (var round = 0; round < 24; round++) { - // Theta - for (var x = 0; x < 5; x++) { - // Mix column lanes - var tMsw = 0, tLsw = 0; - for (var y = 0; y < 5; y++) { - var lane = state[x + 5 * y]; - tMsw ^= lane.high; - tLsw ^= lane.low; - } - - // Temporary values - var Tx = T[x]; - Tx.high = tMsw; - Tx.low = tLsw; - } - for (var x = 0; x < 5; x++) { - // Shortcuts - var Tx4 = T[(x + 4) % 5]; - var Tx1 = T[(x + 1) % 5]; - var Tx1Msw = Tx1.high; - var Tx1Lsw = Tx1.low; - - // Mix surrounding columns - var tMsw = Tx4.high ^ ((Tx1Msw << 1) | (Tx1Lsw >>> 31)); - var tLsw = Tx4.low ^ ((Tx1Lsw << 1) | (Tx1Msw >>> 31)); - for (var y = 0; y < 5; y++) { - var lane = state[x + 5 * y]; - lane.high ^= tMsw; - lane.low ^= tLsw; - } - } - - // Rho Pi - for (var laneIndex = 1; laneIndex < 25; laneIndex++) { - // Shortcuts - var lane = state[laneIndex]; - var laneMsw = lane.high; - var laneLsw = lane.low; - var rhoOffset = RHO_OFFSETS[laneIndex]; - - // Rotate lanes - if (rhoOffset < 32) { - var tMsw = (laneMsw << rhoOffset) | (laneLsw >>> (32 - rhoOffset)); - var tLsw = (laneLsw << rhoOffset) | (laneMsw >>> (32 - rhoOffset)); - } else /* if (rhoOffset >= 32) */ { - var tMsw = (laneLsw << (rhoOffset - 32)) | (laneMsw >>> (64 - rhoOffset)); - var tLsw = (laneMsw << (rhoOffset - 32)) | (laneLsw >>> (64 - rhoOffset)); - } - - // Transpose lanes - var TPiLane = T[PI_INDEXES[laneIndex]]; - TPiLane.high = tMsw; - TPiLane.low = tLsw; - } - - // Rho pi at x = y = 0 - var T0 = T[0]; - var state0 = state[0]; - T0.high = state0.high; - T0.low = state0.low; - - // Chi - for (var x = 0; x < 5; x++) { - for (var y = 0; y < 5; y++) { - // Shortcuts - var laneIndex = x + 5 * y; - var lane = state[laneIndex]; - var TLane = T[laneIndex]; - var Tx1Lane = T[((x + 1) % 5) + 5 * y]; - var Tx2Lane = T[((x + 2) % 5) + 5 * y]; - - // Mix rows - lane.high = TLane.high ^ (~Tx1Lane.high & Tx2Lane.high); - lane.low = TLane.low ^ (~Tx1Lane.low & Tx2Lane.low); - } - } - - // Iota - var lane = state[0]; - var roundConstant = ROUND_CONSTANTS[round]; - lane.high ^= roundConstant.high; - lane.low ^= roundConstant.low;; - } - }, - - _doFinalize: function () { - // Shortcuts - var data = this._data; - var dataWords = data.words; - var nBitsTotal = this._nDataBytes * 8; - var nBitsLeft = data.sigBytes * 8; - var blockSizeBits = this.blockSize * 32; - - // Add padding - dataWords[nBitsLeft >>> 5] |= 0x1 << (24 - nBitsLeft % 32); - dataWords[((Math.ceil((nBitsLeft + 1) / blockSizeBits) * blockSizeBits) >>> 5) - 1] |= 0x80; - data.sigBytes = dataWords.length * 4; - - // Hash final blocks - this._process(); - - // Shortcuts - var state = this._state; - var 
outputLengthBytes = this.cfg.outputLength / 8; - var outputLengthLanes = outputLengthBytes / 8; - - // Squeeze - var hashWords = []; - for (var i = 0; i < outputLengthLanes; i++) { - // Shortcuts - var lane = state[i]; - var laneMsw = lane.high; - var laneLsw = lane.low; - - // Swap endian - laneMsw = ( - (((laneMsw << 8) | (laneMsw >>> 24)) & 0x00ff00ff) | - (((laneMsw << 24) | (laneMsw >>> 8)) & 0xff00ff00) - ); - laneLsw = ( - (((laneLsw << 8) | (laneLsw >>> 24)) & 0x00ff00ff) | - (((laneLsw << 24) | (laneLsw >>> 8)) & 0xff00ff00) - ); - - // Squeeze state to retrieve hash - hashWords.push(laneLsw); - hashWords.push(laneMsw); - } - - // Return final computed hash - return new WordArray.init(hashWords, outputLengthBytes); - }, - - clone: function () { - var clone = Hasher.clone.call(this); - - var state = clone._state = this._state.slice(0); - for (var i = 0; i < 25; i++) { - state[i] = state[i].clone(); - } - - return clone; - } - }); - - /** - * Shortcut function to the hasher's object interface. - * - * @param {WordArray|string} message The message to hash. - * - * @return {WordArray} The hash. - * - * @static - * - * @example - * - * var hash = CryptoJS.SHA3('message'); - * var hash = CryptoJS.SHA3(wordArray); - */ - C.SHA3 = Hasher._createHelper(SHA3); - - /** - * Shortcut function to the HMAC's object interface. - * - * @param {WordArray|string} message The message to hash. - * @param {WordArray|string} key The secret key. - * - * @return {WordArray} The HMAC. - * - * @static - * - * @example - * - * var hmac = CryptoJS.HmacSHA3(message, key); - */ - C.HmacSHA3 = Hasher._createHmacHelper(SHA3); - }(Math)); - - - return CryptoJS.SHA3; - -})); -},{"./core":29,"./x64-core":31}],31:[function(require,module,exports){ -;(function (root, factory) { - if (typeof exports === "object") { - // CommonJS - module.exports = exports = factory(require("./core")); - } - else if (typeof define === "function" && define.amd) { - // AMD - define(["./core"], factory); - } - else { - // Global (browser) - factory(root.CryptoJS); - } -}(this, function (CryptoJS) { - - (function (undefined) { - // Shortcuts - var C = CryptoJS; - var C_lib = C.lib; - var Base = C_lib.Base; - var X32WordArray = C_lib.WordArray; - - /** - * x64 namespace. - */ - var C_x64 = C.x64 = {}; - - /** - * A 64-bit word. - */ - var X64Word = C_x64.Word = Base.extend({ - /** - * Initializes a newly created 64-bit word. - * - * @param {number} high The high 32 bits. - * @param {number} low The low 32 bits. - * - * @example - * - * var x64Word = CryptoJS.x64.Word.create(0x00010203, 0x04050607); - */ - init: function (high, low) { - this.high = high; - this.low = low; - } - - /** - * Bitwise NOTs this word. - * - * @return {X64Word} A new x64-Word object after negating. - * - * @example - * - * var negated = x64Word.not(); - */ - // not: function () { - // var high = ~this.high; - // var low = ~this.low; - - // return X64Word.create(high, low); - // }, - - /** - * Bitwise ANDs this word with the passed word. - * - * @param {X64Word} word The x64-Word to AND with this word. - * - * @return {X64Word} A new x64-Word object after ANDing. - * - * @example - * - * var anded = x64Word.and(anotherX64Word); - */ - // and: function (word) { - // var high = this.high & word.high; - // var low = this.low & word.low; - - // return X64Word.create(high, low); - // }, - - /** - * Bitwise ORs this word with the passed word. - * - * @param {X64Word} word The x64-Word to OR with this word. - * - * @return {X64Word} A new x64-Word object after ORing. 
- * - * @example - * - * var ored = x64Word.or(anotherX64Word); - */ - // or: function (word) { - // var high = this.high | word.high; - // var low = this.low | word.low; - - // return X64Word.create(high, low); - // }, - - /** - * Bitwise XORs this word with the passed word. - * - * @param {X64Word} word The x64-Word to XOR with this word. - * - * @return {X64Word} A new x64-Word object after XORing. - * - * @example - * - * var xored = x64Word.xor(anotherX64Word); - */ - // xor: function (word) { - // var high = this.high ^ word.high; - // var low = this.low ^ word.low; - - // return X64Word.create(high, low); - // }, - - /** - * Shifts this word n bits to the left. - * - * @param {number} n The number of bits to shift. - * - * @return {X64Word} A new x64-Word object after shifting. - * - * @example - * - * var shifted = x64Word.shiftL(25); - */ - // shiftL: function (n) { - // if (n < 32) { - // var high = (this.high << n) | (this.low >>> (32 - n)); - // var low = this.low << n; - // } else { - // var high = this.low << (n - 32); - // var low = 0; - // } - - // return X64Word.create(high, low); - // }, - - /** - * Shifts this word n bits to the right. - * - * @param {number} n The number of bits to shift. - * - * @return {X64Word} A new x64-Word object after shifting. - * - * @example - * - * var shifted = x64Word.shiftR(7); - */ - // shiftR: function (n) { - // if (n < 32) { - // var low = (this.low >>> n) | (this.high << (32 - n)); - // var high = this.high >>> n; - // } else { - // var low = this.high >>> (n - 32); - // var high = 0; - // } - - // return X64Word.create(high, low); - // }, - - /** - * Rotates this word n bits to the left. - * - * @param {number} n The number of bits to rotate. - * - * @return {X64Word} A new x64-Word object after rotating. - * - * @example - * - * var rotated = x64Word.rotL(25); - */ - // rotL: function (n) { - // return this.shiftL(n).or(this.shiftR(64 - n)); - // }, - - /** - * Rotates this word n bits to the right. - * - * @param {number} n The number of bits to rotate. - * - * @return {X64Word} A new x64-Word object after rotating. - * - * @example - * - * var rotated = x64Word.rotR(7); - */ - // rotR: function (n) { - // return this.shiftR(n).or(this.shiftL(64 - n)); - // }, - - /** - * Adds this word with the passed word. - * - * @param {X64Word} word The x64-Word to add with this word. - * - * @return {X64Word} A new x64-Word object after adding. - * - * @example - * - * var added = x64Word.add(anotherX64Word); - */ - // add: function (word) { - // var low = (this.low + word.low) | 0; - // var carry = (low >>> 0) < (this.low >>> 0) ? 1 : 0; - // var high = (this.high + word.high + carry) | 0; - - // return X64Word.create(high, low); - // } - }); - - /** - * An array of 64-bit words. - * - * @property {Array} words The array of CryptoJS.x64.Word objects. - * @property {number} sigBytes The number of significant bytes in this word array. - */ - var X64WordArray = C_x64.WordArray = Base.extend({ - /** - * Initializes a newly created word array. - * - * @param {Array} words (Optional) An array of CryptoJS.x64.Word objects. - * @param {number} sigBytes (Optional) The number of significant bytes in the words. 
- * - * @example - * - * var wordArray = CryptoJS.x64.WordArray.create(); - * - * var wordArray = CryptoJS.x64.WordArray.create([ - * CryptoJS.x64.Word.create(0x00010203, 0x04050607), - * CryptoJS.x64.Word.create(0x18191a1b, 0x1c1d1e1f) - * ]); - * - * var wordArray = CryptoJS.x64.WordArray.create([ - * CryptoJS.x64.Word.create(0x00010203, 0x04050607), - * CryptoJS.x64.Word.create(0x18191a1b, 0x1c1d1e1f) - * ], 10); - */ - init: function (words, sigBytes) { - words = this.words = words || []; - - if (sigBytes != undefined) { - this.sigBytes = sigBytes; - } else { - this.sigBytes = words.length * 8; - } - }, - - /** - * Converts this 64-bit word array to a 32-bit word array. - * - * @return {CryptoJS.lib.WordArray} This word array's data as a 32-bit word array. - * - * @example - * - * var x32WordArray = x64WordArray.toX32(); - */ - toX32: function () { - // Shortcuts - var x64Words = this.words; - var x64WordsLength = x64Words.length; - - // Convert - var x32Words = []; - for (var i = 0; i < x64WordsLength; i++) { - var x64Word = x64Words[i]; - x32Words.push(x64Word.high); - x32Words.push(x64Word.low); - } - - return X32WordArray.create(x32Words, this.sigBytes); - }, - - /** - * Creates a copy of this word array. - * - * @return {X64WordArray} The clone. - * - * @example - * - * var clone = x64WordArray.clone(); - */ - clone: function () { - var clone = Base.clone.call(this); - - // Clone "words" array - var words = clone.words = this.words.slice(0); - - // Clone each X64Word object - var wordsLength = words.length; - for (var i = 0; i < wordsLength; i++) { - words[i] = words[i].clone(); - } - - return clone; - } - }); - }()); - - - return CryptoJS; - -})); -},{"./core":29}],"bignumber.js":[function(require,module,exports){ +},{"./method":20}],27:[function(require,module,exports){ + +},{}],"bignumber.js":[function(require,module,exports){ 'use strict'; module.exports = BigNumber; // jshint ignore:line @@ -5272,7 +3872,7 @@ if (typeof window !== 'undefined' && typeof window.web3 === 'undefined') { module.exports = web3; -},{"./lib/web3":9,"./lib/web3/contract":11,"./lib/web3/httpprovider":19,"./lib/web3/qtsync":24}]},{},["web3"]) +},{"./lib/web3":8,"./lib/web3/contract":10,"./lib/web3/httpprovider":18,"./lib/web3/qtsync":23}]},{},["web3"]) //# sourceMappingURL=web3-light.js.map From 9138955ba534ec074ecf55c4d02e2f3e72b29faa Mon Sep 17 00:00:00 2001 From: Gustav Simonsson Date: Thu, 28 May 2015 15:20:05 +0200 Subject: [PATCH 28/34] Validate account length and avoid slicing in logging --- cmd/geth/main.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cmd/geth/main.go b/cmd/geth/main.go index eecd7546a8..f82a121e76 100644 --- a/cmd/geth/main.go +++ b/cmd/geth/main.go @@ -340,13 +340,13 @@ func unlockAccount(ctx *cli.Context, am *accounts.Manager, account string) (pass var err error // Load startup keys. 
XXX we are going to need a different format - if len(account) == 0 { + if !((len(account) == 40) || (len(account) == 42)) { // with or without 0x utils.Fatalf("Invalid account address '%s'", account) } // Attempt to unlock the account 3 times attempts := 3 for tries := 0; tries < attempts; tries++ { - msg := fmt.Sprintf("Unlocking account %s...%s | Attempt %d/%d", account[:8], account[len(account)-6:], tries+1, attempts) + msg := fmt.Sprintf("Unlocking account %s | Attempt %d/%d", account, tries+1, attempts) passphrase = getPassPhrase(ctx, msg, false) err = am.Unlock(common.HexToAddress(account), passphrase) if err == nil { From 75f5ae80fd8a63d3e21462bcb7c338f44cd2b6e8 Mon Sep 17 00:00:00 2001 From: obscuren Date: Thu, 28 May 2015 15:35:50 +0200 Subject: [PATCH 29/34] core: partially removed nonce parallelisation and added merge error chk Invalid forks are now detected Current setup of parellelisation actually inserts bad blocks. This fix is tmp until a better one is found --- core/chain_manager.go | 80 ++++++++++++++++++++++++++++++------------- 1 file changed, 56 insertions(+), 24 deletions(-) diff --git a/core/chain_manager.go b/core/chain_manager.go index edf8825f35..24ef2c0abb 100644 --- a/core/chain_manager.go +++ b/core/chain_manager.go @@ -548,18 +548,21 @@ func (self *ChainManager) InsertChain(chain types.Blocks) (int, error) { tstart = time.Now() ) - // check the nonce in parallel to the block processing - // this speeds catching up significantly - nonceErrCh := make(chan error) - go func() { - nonceErrCh <- verifyNonces(self.pow, chain) - }() - for i, block := range chain { if block == nil { continue } + if BadHashes[block.Hash()] { + err := fmt.Errorf("Found known bad hash in chain %x", block.Hash()) + blockErr(block, err) + return i, err + } + + // create a nonce channel for parallisation of the nonce check + nonceErrCh := make(chan error) + go verifyBlockNonce(self.pow, block, nonceErrCh) + // Setting block.Td regardless of error (known for example) prevents errors down the line // in the protocol handler block.Td = new(big.Int).Set(CalcTD(block, self.GetBlock(block.ParentHash()))) @@ -568,13 +571,14 @@ func (self *ChainManager) InsertChain(chain types.Blocks) (int, error) { // all others will fail too (unless a known block is returned). logs, err := self.processor.Process(block) if err != nil { + // empty the nonce channel + <-nonceErrCh + if IsKnownBlockErr(err) { stats.ignored++ continue } - // Do not penelise on future block. We'll need a block queue eventually that will queue - // future block for future use if err == BlockFutureErr { block.SetQueued(true) self.futureBlocks.Push(block) @@ -593,18 +597,23 @@ func (self *ChainManager) InsertChain(chain types.Blocks) (int, error) { return i, err } + // Wait and check nonce channel and make sure it checks out fine + // otherwise return the error + if err := <-nonceErrCh; err != nil { + return i, err + } cblock := self.currentBlock - // Write block to database. Eventually we'll have to improve on this and throw away blocks that are - // not in the canonical chain. - self.write(block) // Compare the TD of the last known block in the canonical chain to make sure it's greater. // At this point it's possible that a different chain (fork) becomes the new canonical chain. 
if block.Td.Cmp(self.td) > 0 { // chain fork if block.ParentHash() != cblock.Hash() { // during split we merge two different chains and create the new canonical chain - self.merge(cblock, block) + err := self.merge(cblock, block) + if err != nil { + return i, err + } queue[i] = ChainSplitEvent{block, logs} queueEvent.splitCount++ @@ -637,19 +646,16 @@ func (self *ChainManager) InsertChain(chain types.Blocks) (int, error) { queue[i] = ChainSideEvent{block, logs} queueEvent.sideCount++ } + // Write block to database. Eventually we'll have to improve on this and throw away blocks that are + // not in the canonical chain. + self.write(block) + // Delete from future blocks self.futureBlocks.Delete(block.Hash()) stats.processed++ } - // check and wait for the nonce error channel and - // make sure no nonce error was thrown in the process - err := <-nonceErrCh - if err != nil { - return 0, err - } - if (stats.queued > 0 || stats.processed > 0 || stats.ignored > 0) && bool(glog.V(logger.Info)) { tend := time.Since(tstart) start, end := chain[0], chain[len(chain)-1] @@ -663,7 +669,7 @@ func (self *ChainManager) InsertChain(chain types.Blocks) (int, error) { // diff takes two blocks, an old chain and a new chain and will reconstruct the blocks and inserts them // to be part of the new canonical chain. -func (self *ChainManager) diff(oldBlock, newBlock *types.Block) types.Blocks { +func (self *ChainManager) diff(oldBlock, newBlock *types.Block) (types.Blocks, error) { var ( newChain types.Blocks commonBlock *types.Block @@ -675,10 +681,17 @@ func (self *ChainManager) diff(oldBlock, newBlock *types.Block) types.Blocks { if oldBlock.NumberU64() > newBlock.NumberU64() { // reduce old chain for oldBlock = oldBlock; oldBlock.NumberU64() != newBlock.NumberU64(); oldBlock = self.GetBlock(oldBlock.ParentHash()) { + if oldBlock == nil { + return nil, fmt.Errorf("Invalid old chain") + } } } else { // reduce new chain and append new chain blocks for inserting later on for newBlock = newBlock; newBlock.NumberU64() != oldBlock.NumberU64(); newBlock = self.GetBlock(newBlock.ParentHash()) { + if newBlock == nil { + return nil, fmt.Errorf("Invalid new chain") + } + newChain = append(newChain, newBlock) } } @@ -692,6 +705,12 @@ func (self *ChainManager) diff(oldBlock, newBlock *types.Block) types.Blocks { newChain = append(newChain, newBlock) oldBlock, newBlock = self.GetBlock(oldBlock.ParentHash()), self.GetBlock(newBlock.ParentHash()) + if oldBlock == nil { + return nil, fmt.Errorf("Invalid old chain") + } + if newBlock == nil { + return nil, fmt.Errorf("Invalid new chain") + } } if glog.V(logger.Info) { @@ -699,17 +718,22 @@ func (self *ChainManager) diff(oldBlock, newBlock *types.Block) types.Blocks { glog.Infof("Fork detected @ %x. Reorganising chain from #%v %x to %x", commonHash[:4], numSplit, oldStart.Hash().Bytes()[:4], newStart.Hash().Bytes()[:4]) } - return newChain + return newChain, nil } // merge merges two different chain to the new canonical chain -func (self *ChainManager) merge(oldBlock, newBlock *types.Block) { - newChain := self.diff(oldBlock, newBlock) +func (self *ChainManager) merge(oldBlock, newBlock *types.Block) error { + newChain, err := self.diff(oldBlock, newBlock) + if err != nil { + return fmt.Errorf("chain reorg failed: %v", err) + } // insert blocks. Order does not matter. 
Last block will be written in ImportChain itself which creates the new head properly for _, block := range newChain { self.insert(block) } + + return nil } func (self *ChainManager) update() { @@ -808,3 +832,11 @@ func verifyNonce(pow pow.PoW, in <-chan *types.Block, done chan<- error) { } } } + +func verifyBlockNonce(pow pow.PoW, block *types.Block, done chan<- error) { + if !pow.Verify(block) { + done <- ValidationError("Block(#%v) nonce is invalid (= %x)", block.Number(), block.Nonce) + } else { + done <- nil + } +} From 109f9956848c83cf7683e123ab5f2358371d9e37 Mon Sep 17 00:00:00 2001 From: obscuren Date: Thu, 28 May 2015 15:46:36 +0200 Subject: [PATCH 30/34] core: log block hash during nonce error --- core/chain_manager.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/chain_manager.go b/core/chain_manager.go index 24ef2c0abb..86e90a8153 100644 --- a/core/chain_manager.go +++ b/core/chain_manager.go @@ -826,7 +826,7 @@ func verifyNonces(pow pow.PoW, blocks []*types.Block) error { func verifyNonce(pow pow.PoW, in <-chan *types.Block, done chan<- error) { for block := range in { if !pow.Verify(block) { - done <- ValidationError("Block(#%v) nonce is invalid (= %x)", block.Number(), block.Nonce) + done <- ValidationError("Block (#%v / %x) nonce is invalid (= %x)", block.Number(), block.Hash(), block.Nonce) } else { done <- nil } @@ -835,7 +835,7 @@ func verifyNonce(pow pow.PoW, in <-chan *types.Block, done chan<- error) { func verifyBlockNonce(pow pow.PoW, block *types.Block, done chan<- error) { if !pow.Verify(block) { - done <- ValidationError("Block(#%v) nonce is invalid (= %x)", block.Number(), block.Nonce) + done <- ValidationError("Block (#%v / %x) nonce is invalid (= %x)", block.Number(), block.Hash(), block.Nonce) } else { done <- nil } From 16038b4e671da2c060beded58127448fb3d756a6 Mon Sep 17 00:00:00 2001 From: obscuren Date: Thu, 28 May 2015 16:26:19 +0200 Subject: [PATCH 31/34] core: added bad block --- core/blocks.go | 1 + 1 file changed, 1 insertion(+) diff --git a/core/blocks.go b/core/blocks.go index 35e170af32..83727ff62e 100644 --- a/core/blocks.go +++ b/core/blocks.go @@ -6,4 +6,5 @@ import "github.com/ethereum/go-ethereum/common" var BadHashes = map[common.Hash]bool{ common.HexToHash("f269c503aed286caaa0d114d6a5320e70abbc2febe37953207e76a2873f2ba79"): true, common.HexToHash("38f5bbbffd74804820ffa4bab0cd540e9de229725afb98c1a7e57936f4a714bc"): true, + common.HexToHash("7064455b364775a16afbdecd75370e912c6e2879f202eda85b9beae547fff3ac"): true, } From e5d7627427262f7df0906233aecbcbb37c8daeed Mon Sep 17 00:00:00 2001 From: obscuren Date: Thu, 28 May 2015 17:01:44 +0200 Subject: [PATCH 32/34] eth: 100% block propogation --- eth/handler.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/eth/handler.go b/eth/handler.go index 8092a5f714..aea33452ca 100644 --- a/eth/handler.go +++ b/eth/handler.go @@ -2,7 +2,6 @@ package eth import ( "fmt" - "math" "math/big" "sync" "time" @@ -359,7 +358,7 @@ func (pm *ProtocolManager) verifyTd(peer *peer, request newBlockMsgData) error { func (pm *ProtocolManager) BroadcastBlock(hash common.Hash, block *types.Block) { // Broadcast block to a batch of peers not knowing about it peers := pm.peers.PeersWithoutBlock(hash) - peers = peers[:int(math.Sqrt(float64(len(peers))))] + //peers = peers[:int(math.Sqrt(float64(len(peers))))] for _, peer := range peers { peer.sendNewBlock(block) } From 35806ccc1c604a39893771303d21b384b2aa0c87 Mon Sep 17 00:00:00 2001 From: obscuren Date: Thu, 28 May 2015 17:18:13 
+0200 Subject: [PATCH 33/34] build server fix --- miner/worker.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/miner/worker.go b/miner/worker.go index bc69551696..58efd61db7 100644 --- a/miner/worker.go +++ b/miner/worker.go @@ -289,6 +289,10 @@ func (self *worker) push() { func (self *worker) makeCurrent() { block := self.chain.NewBlock(self.coinbase) parent := self.chain.GetBlock(block.ParentHash()) + // TMP fix for build server ... + if parent == nil { + return + } if block.Time() <= parent.Time() { block.Header().Time = parent.Header().Time + 1 From d51d74eb55535db7670ad336d186ea64c6a2ff81 Mon Sep 17 00:00:00 2001 From: obscuren Date: Thu, 28 May 2015 17:43:05 +0200 Subject: [PATCH 34/34] cmd/geth: bump version v0.9.26 --- cmd/geth/main.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/geth/main.go b/cmd/geth/main.go index f82a121e76..ab46fdd3e2 100644 --- a/cmd/geth/main.go +++ b/cmd/geth/main.go @@ -44,7 +44,7 @@ import ( const ( ClientIdentifier = "Geth" - Version = "0.9.25" + Version = "0.9.26" ) var (
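
The nonce-verification change in PATCH 29 above boils down to a simple channel discipline: each block gets its own verifier goroutine, every early-return path must drain the result channel so that goroutine is never left blocked, and only a successful receive lets the block be accepted. The following is a minimal, self-contained Go sketch of that discipline only; Block, PoW, fakePow, process and insertChain are hypothetical stand-ins for illustration, not the go-ethereum types or functions.

package main

import (
	"errors"
	"fmt"
)

// Block and PoW are hypothetical stand-ins; only the channel pattern matters here.
type Block struct {
	Number uint64
	Valid  bool
}

type PoW interface {
	Verify(b *Block) bool
}

type fakePow struct{}

func (fakePow) Verify(b *Block) bool { return b.Valid }

// verifyBlockNonce always sends exactly one value on done, so the caller can
// rely on a single receive per spawned goroutine.
func verifyBlockNonce(pow PoW, b *Block, done chan<- error) {
	if !pow.Verify(b) {
		done <- fmt.Errorf("block #%d nonce is invalid", b.Number)
		return
	}
	done <- nil
}

// process stands in for block processing that can fail for reasons unrelated
// to the nonce.
func process(b *Block) error {
	if b.Number == 0 {
		return errors.New("refusing to process the genesis block")
	}
	return nil
}

func insertChain(pow PoW, chain []*Block) (int, error) {
	for i, b := range chain {
		// One unbuffered channel per block: the verifier runs while the
		// block is being processed.
		nonceErrCh := make(chan error)
		go verifyBlockNonce(pow, b, nonceErrCh)

		if err := process(b); err != nil {
			// Drain the channel on the error path so the verifier
			// goroutine is never left blocked on its send.
			<-nonceErrCh
			return i, err
		}

		// Processing succeeded; now the nonce result decides whether the
		// block is accepted.
		if err := <-nonceErrCh; err != nil {
			return i, err
		}
	}
	return len(chain), nil
}

func main() {
	chain := []*Block{
		{Number: 1, Valid: true},
		{Number: 2, Valid: false}, // bad nonce, chain import stops here
	}
	n, err := insertChain(fakePow{}, chain)
	fmt.Printf("imported %d block(s), err: %v\n", n, err)
}

Running this sketch imports block #1 and stops at block #2 with a nonce error, mirroring the (index, error) return convention that InsertChain uses in the patch above.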