@@ -40,11 +40,6 @@ import (
 	"gopkg.in/urfave/cli.v1"
 )
 
-var (
-	// secureKeyPrefix is the database key prefix used to store trie node preimages.
-	secureKeyPrefix = []byte("secure-key-")
-)
-
 var (
 	initCommand = cli.Command{
 		Action:    utils.MigrateFlags(initGenesis),
@@ -100,6 +95,34 @@ Requires a first argument of the file to write to.
 Optional second and third arguments control the first and
 last block to write. In this mode, the file will be appended
 if already existing.`,
 	}
+	importPreimagesCommand = cli.Command{
+		Action:    utils.MigrateFlags(importPreimages),
+		Name:      "import-preimages",
+		Usage:     "Import the preimage database from an RLP stream",
+		ArgsUsage: "<datafile>",
+		Flags: []cli.Flag{
+			utils.DataDirFlag,
+			utils.CacheFlag,
+			utils.LightModeFlag,
+		},
+		Category: "BLOCKCHAIN COMMANDS",
+		Description: `
+The import-preimages command imports hash preimages from an RLP encoded stream.`,
+	}
+	exportPreimagesCommand = cli.Command{
+		Action:    utils.MigrateFlags(exportPreimages),
+		Name:      "export-preimages",
+		Usage:     "Export the preimage database into an RLP stream",
+		ArgsUsage: "<dumpfile>",
+		Flags: []cli.Flag{
+			utils.DataDirFlag,
+			utils.CacheFlag,
+			utils.LightModeFlag,
+		},
+		Category: "BLOCKCHAIN COMMANDS",
+		Description: `
+The export-preimages command exports hash preimages to an RLP encoded stream.`,
+	}
 	copydbCommand = cli.Command{
 		Action:    utils.MigrateFlags(copyDb),
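The two new commands above only declare the CLI surface; the RLP stream itself is produced and consumed by utils.ExportPreimages and utils.ImportPreimages, which are not part of this diff. Purely as an illustration, the sketch below assumes the simplest plausible layout, one RLP byte-string per preimage written back to back, and round-trips it in memory. The example values and the exact stream layout are assumptions, not taken from this change; Keccak256 stands in for the hashing the secure trie applies.

    package main

    import (
    	"bytes"
    	"fmt"
    	"io"

    	"github.com/ethereum/go-ethereum/crypto"
    	"github.com/ethereum/go-ethereum/rlp"
    )

    func main() {
    	// Two made-up preimages standing in for real trie key preimages.
    	preimages := [][]byte{[]byte("hello"), []byte("world")}

    	// Export side: append every preimage to the stream as one RLP value.
    	var buf bytes.Buffer
    	for _, p := range preimages {
    		if err := rlp.Encode(&buf, p); err != nil {
    			panic(err)
    		}
    	}

    	// Import side: read values until the stream runs dry and rebuild the
    	// hash -> preimage mapping that would be written to the database.
    	s := rlp.NewStream(bytes.NewReader(buf.Bytes()), 0)
    	for {
    		p, err := s.Bytes()
    		if err == io.EOF {
    			break
    		}
    		if err != nil {
    			panic(err)
    		}
    		fmt.Printf("%x -> %q\n", crypto.Keccak256(p), p)
    	}
    }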
@@ -146,34 +169,6 @@ Remove blockchain and state databases`,
 The arguments are interpreted as block numbers or hashes.
 Use "ethereum dump 0" to dump the genesis block.`,
 	}
-	preimageDumpCommand = cli.Command{
-		Action:    utils.MigrateFlags(dumpPreimage),
-		Name:      "preimagedump",
-		Usage:     "Dump the preimage database in json format",
-		ArgsUsage: "<dumpfile>",
-		Flags: []cli.Flag{
-			utils.DataDirFlag,
-			utils.CacheFlag,
-			utils.LightModeFlag,
-		},
-		Category: "BLOCKCHAIN COMMANDS",
-		Description: `
-Dump the preimage database in json format`,
-	}
-	preimageImportCommand = cli.Command{
-		Action:    utils.MigrateFlags(importPreimage),
-		Name:      "preimageimport",
-		Usage:     "Import the preimage data from the specified file",
-		ArgsUsage: "<datafile>",
-		Flags: []cli.Flag{
-			utils.DataDirFlag,
-			utils.CacheFlag,
-			utils.LightModeFlag,
-		},
-		Category: "BLOCKCHAIN COMMANDS",
-		Description: `
-Import the preimage data from the specified file`,
-	}
 )
 
 // initGenesis will initialise the given JSON format genesis file and writes it as
@@ -332,7 +327,39 @@ func exportChain(ctx *cli.Context) error {
 	if err != nil {
 		utils.Fatalf("Export error: %v\n", err)
 	}
-	fmt.Printf("Export done in %v", time.Since(start))
+	fmt.Printf("Export done in %v\n", time.Since(start))
+	return nil
+}
+
+// importPreimages imports preimage data from the specified file.
+func importPreimages(ctx *cli.Context) error {
+	if len(ctx.Args()) < 1 {
+		utils.Fatalf("This command requires an argument.")
+	}
+	stack := makeFullNode(ctx)
+	diskdb := utils.MakeChainDatabase(ctx, stack).(*ethdb.LDBDatabase)
+
+	start := time.Now()
+	if err := utils.ImportPreimages(diskdb, ctx.Args().First()); err != nil {
+		utils.Fatalf("Import error: %v\n", err)
+	}
+	fmt.Printf("Import done in %v\n", time.Since(start))
+	return nil
+}
+
+// exportPreimages dumps the preimage data to the specified file as an RLP stream.
+func exportPreimages(ctx *cli.Context) error {
+	if len(ctx.Args()) < 1 {
+		utils.Fatalf("This command requires an argument.")
+	}
+	stack := makeFullNode(ctx)
+	diskdb := utils.MakeChainDatabase(ctx, stack).(*ethdb.LDBDatabase)
+
+	start := time.Now()
+	if err := utils.ExportPreimages(diskdb, ctx.Args().First()); err != nil {
+		utils.Fatalf("Export error: %v\n", err)
+	}
+	fmt.Printf("Export done in %v\n", time.Since(start))
 	return nil
 }
 
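importPreimages and exportPreimages above delegate the actual streaming to utils.ImportPreimages and utils.ExportPreimages, which this diff does not show. As a rough sketch only, the helper below illustrates one way the export half could be written, reusing the NewIteratorByPrefix walk over the "secure-key-" prefix from the removed dumpPreimage function and emitting each preimage value as an RLP byte-string; the package name, function name and output format are assumptions, not the real helper.

    // Package preimagesketch is an illustrative stand-in, not part of the diff.
    package preimagesketch

    import (
    	"os"

    	"github.com/ethereum/go-ethereum/ethdb"
    	"github.com/ethereum/go-ethereum/rlp"
    )

    // secureKeyPrefix mirrors the prefix removed from chaincmd.go above.
    var secureKeyPrefix = []byte("secure-key-")

    // exportPreimagesSketch walks every stored preimage and appends it to fn
    // as a bare RLP byte-string, one value after another.
    func exportPreimagesSketch(db *ethdb.LDBDatabase, fn string) error {
    	f, err := os.Create(fn)
    	if err != nil {
    		return err
    	}
    	defer f.Close()

    	it := db.NewIteratorByPrefix(secureKeyPrefix)
    	for it.Next() {
    		if err := rlp.Encode(f, it.Value()); err != nil {
    			return err
    		}
    	}
    	return nil
    }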
@@ -439,86 +466,6 @@ func dump(ctx *cli.Context) error {
 	return nil
 }
 
-// PreimageEntry represents a map between preimage and hash.
-type PreimageEntry struct {
-	Hash     string `json:"hash"`
-	Preimage string `json:"preimage"`
-}
-
-// dumpPreimage dumps the preimage data to specified json file in streaming way.
-func dumpPreimage(ctx *cli.Context) error {
-	// Make sure the export json file has been specified.
-	if len(ctx.Args()) < 1 {
-		utils.Fatalf("This command requires an argument.")
-	}
-
-	// Encode preimage data to json file in streaming way.
-	file, err := os.Create(ctx.Args().First())
-	if err != nil {
-		return err
-	}
-	encoder := json.NewEncoder(file)
-
-	stack := makeFullNode(ctx)
-	db := utils.MakeChainDatabase(ctx, stack)
-
-	// Dump all preimage entries.
-	it := db.(*ethdb.LDBDatabase).NewIteratorByPrefix(secureKeyPrefix)
-	for it.Next() {
-		hash := it.Key()[len(secureKeyPrefix):]
-		if err := encoder.Encode(PreimageEntry{common.Bytes2Hex(hash), common.Bytes2Hex(it.Value())}); err != nil {
-			return err
-		}
-	}
-	return nil
-}
-
-// importPreimages imports preimage data from the specified file.
-func importPreimage(ctx *cli.Context) error {
-	// Make sure the export json file has been specified.
-	if len(ctx.Args()) < 1 {
-		utils.Fatalf("This command requires an argument.")
-	}
-
-	// Decode the preimage data in streaming way.
-	file, err := os.Open(ctx.Args().First())
-	if err != nil {
-		return err
-	}
-	decoder := json.NewDecoder(file)
-
-	stack := makeFullNode(ctx)
-	db := utils.MakeChainDatabase(ctx, stack)
-
-	var (
-		entry     PreimageEntry
-		preimages = make(map[common.Hash][]byte)
-	)
-
-	for decoder.More() {
-		if err := decoder.Decode(&entry); err != nil {
-			return err
-		}
-		preimages[common.HexToHash(entry.Hash)] = common.Hex2Bytes(entry.Preimage)
-		// Flush to database in batch
-		if len(preimages) > 1024 {
-			err := core.WritePreimages(db, 0, preimages)
-			if err != nil {
-				return err
-			}
-			preimages = make(map[common.Hash][]byte)
-		}
-	}
-	// Flush the last batch preimage data
-	if len(preimages) > 0 {
-		err := core.WritePreimages(db, 0, preimages)
-		if err != nil {
-			return err
-		}
-	}
-	return nil
-}
-
 // hashish returns true for strings that look like hashes.
 func hashish(x string) bool {
 	_, err := strconv.Atoi(x)
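For contrast with the new RLP stream, the removed dumpPreimage/importPreimage pair above streamed PreimageEntry values as concatenated JSON objects and read them back with Decoder.More. A self-contained sketch of that round trip, with made-up hex strings standing in for real hashes and preimages:

    package main

    import (
    	"bytes"
    	"encoding/json"
    	"fmt"
    )

    // PreimageEntry matches the struct deleted above: one hash/preimage pair,
    // both hex encoded.
    type PreimageEntry struct {
    	Hash     string `json:"hash"`
    	Preimage string `json:"preimage"`
    }

    func main() {
    	entries := []PreimageEntry{
    		{Hash: "c0ffee", Preimage: "deadbeef"},
    		{Hash: "facade", Preimage: "0badf00d"},
    	}

    	// dumpPreimage's side: one JSON object per entry, back to back.
    	var buf bytes.Buffer
    	enc := json.NewEncoder(&buf)
    	for _, e := range entries {
    		if err := enc.Encode(e); err != nil {
    			panic(err)
    		}
    	}

    	// importPreimage's side: Decoder.More reports whether another value
    	// follows, so the stream can be consumed without loading it whole.
    	dec := json.NewDecoder(&buf)
    	for dec.More() {
    		var e PreimageEntry
    		if err := dec.Decode(&e); err != nil {
    			panic(err)
    		}
    		fmt.Printf("%s -> %s\n", e.Hash, e.Preimage)
    	}
    }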