cmd/geth: implement data import and export #22931

Merged: 20 commits, Nov 2, 2021
cmd/geth/chaincmd.go: 9 changes (6 additions, 3 deletions)
@@ -140,7 +140,9 @@ be gzipped.`,
},
Category: "BLOCKCHAIN COMMANDS",
Description: `
The import-preimages command imports hash preimages from an RLP encoded stream.`,
The import-preimages command imports hash preimages from an RLP encoded stream.
It's deprecated, please use "geth db import" instead.
`,
}
exportPreimagesCommand = cli.Command{
Action: utils.MigrateFlags(exportPreimages),
@@ -154,7 +156,9 @@ be gzipped.`,
},
Category: "BLOCKCHAIN COMMANDS",
Description: `
The export-preimages command export hash preimages to an RLP encoded stream`,
The export-preimages command exports hash preimages to an RLP encoded stream.
It's deprecated, please use "geth db export" instead.
`,
}
dumpCommand = cli.Command{
Action: utils.MigrateFlags(dump),
@@ -368,7 +372,6 @@ func exportPreimages(ctx *cli.Context) error {
if len(ctx.Args()) < 1 {
utils.Fatalf("This command requires an argument.")
}

stack, _ := makeConfigNode(ctx)
defer stack.Close()

cmd/geth/dbcmd.go: 166 changes (166 additions, 0 deletions)
@@ -17,12 +17,16 @@
package main

import (
"bytes"
"errors"
"fmt"
"os"
"os/signal"
"path/filepath"
"sort"
"strconv"
"strings"
"syscall"
"time"

"github.com/ethereum/go-ethereum/cmd/utils"
@@ -63,6 +67,8 @@ Remove blockchain and state databases`,
dbPutCmd,
dbGetSlotsCmd,
dbDumpFreezerIndex,
dbImportCmd,
dbExportCmd,
},
}
dbInspectCmd = cli.Command{
@@ -188,6 +194,36 @@ WARNING: This is a low-level operation which may cause database corruption!`,
},
Description: "This command displays information about the freezer index.",
}
dbImportCmd = cli.Command{
Action: utils.MigrateFlags(importLDBdata),
Name: "import",
Usage: "Imports leveldb-data from an exported RLP dump.",
ArgsUsage: "<dumpfile> <start (optional)",
Flags: []cli.Flag{
utils.DataDirFlag,
utils.SyncModeFlag,
utils.MainnetFlag,
utils.RopstenFlag,
utils.RinkebyFlag,
utils.GoerliFlag,
},
Description: "The import command imports the specific chain data from an RLP encoded stream.",
}
dbExportCmd = cli.Command{
Action: utils.MigrateFlags(exportChaindata),
Name: "export",
Usage: "Exports the chain data into an RLP dump. If the <dumpfile> has .gz suffix, gzip compression will be used.",
ArgsUsage: "<type> <dumpfile>",
Flags: []cli.Flag{
utils.DataDirFlag,
utils.SyncModeFlag,
utils.MainnetFlag,
utils.RopstenFlag,
utils.RinkebyFlag,
utils.GoerliFlag,
},
Description: "Exports the specified chain data to an RLP encoded stream, optionally gzip-compressed.",
}
)
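
The export usage above documents that a ".gz" dumpfile suffix enables gzip compression. As a rough, non-authoritative sketch of what that implies for anyone reading such a dump outside geth (the actual encode/decode helpers live in cmd/utils and are not part of this diff; the dumputil package and openDump helper are hypothetical, and the record layout inside the stream is not shown):

```go
package dumputil

import (
	"compress/gzip"
	"io"
	"os"
	"strings"

	"github.com/ethereum/go-ethereum/rlp"
)

// openDump opens a dump written by "geth db export" and returns an RLP
// stream over its (possibly gzip-compressed) contents, plus the file handle
// that the caller must close when done.
func openDump(name string) (*rlp.Stream, io.Closer, error) {
	f, err := os.Open(name)
	if err != nil {
		return nil, nil, err
	}
	var r io.Reader = f
	if strings.HasSuffix(name, ".gz") {
		// Per the command usage, a ".gz" suffix means gzip compression.
		if r, err = gzip.NewReader(f); err != nil {
			f.Close()
			return nil, nil, err
		}
	}
	return rlp.NewStream(r, 0), f, nil
}
```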

func removeDB(ctx *cli.Context) error {
@@ -510,3 +546,133 @@ func parseHexOrString(str string) ([]byte, error) {
}
return b, err
}

func importLDBdata(ctx *cli.Context) error {
start := 0
switch ctx.NArg() {
case 1:
break
case 2:
s, err := strconv.Atoi(ctx.Args().Get(1))
if err != nil {
return fmt.Errorf("second arg must be an integer: %v", err)
}
start = s
default:
return fmt.Errorf("required arguments: %v", ctx.Command.ArgsUsage)
}
var (
fName = ctx.Args().Get(0)
stack, _ = makeConfigNode(ctx)
interrupt = make(chan os.Signal, 1)
stop = make(chan struct{})
)
defer stack.Close()
signal.Notify(interrupt, syscall.SIGINT, syscall.SIGTERM)
defer signal.Stop(interrupt)
defer close(interrupt)
go func() {
if _, ok := <-interrupt; ok {
log.Info("Interrupted during ldb import, stopping at next batch")
}
close(stop)
}()
db := utils.MakeChainDatabase(ctx, stack, false)
return utils.ImportLDBData(db, fName, int64(start), stop)
}
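
The import logic itself is delegated to a helper in cmd/utils that is outside this file's diff. For orientation while reading, its shape as inferred from the call above is roughly the following; the alias name and exact details are assumptions:

```go
// Shape of utils.ImportLDBData, inferred from its call site above; the
// canonical declaration lives in cmd/utils and may differ in detail.
type importLDBDataFunc = func(db ethdb.Database, fn string, startIndex int64, interrupt chan struct{}) error
```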

type preimageIterator struct {
iter ethdb.Iterator
}

func (iter *preimageIterator) Next() (byte, []byte, []byte, bool) {
for iter.iter.Next() {
key := iter.iter.Key()
if bytes.HasPrefix(key, rawdb.PreimagePrefix) && len(key) == (len(rawdb.PreimagePrefix)+common.HashLength) {
return utils.OpBatchAdd, key, iter.iter.Value(), true
}
}
return 0, nil, nil, false
}

func (iter *preimageIterator) Release() {
iter.iter.Release()
}

type snapshotIterator struct {
init bool
account ethdb.Iterator
storage ethdb.Iterator
}

func (iter *snapshotIterator) Next() (byte, []byte, []byte, bool) {
if !iter.init {
iter.init = true
return utils.OpBatchDel, rawdb.SnapshotRootKey, nil, true
}
for iter.account.Next() {
key := iter.account.Key()
if bytes.HasPrefix(key, rawdb.SnapshotAccountPrefix) && len(key) == (len(rawdb.SnapshotAccountPrefix)+common.HashLength) {
return utils.OpBatchAdd, key, iter.account.Value(), true
}
}
for iter.storage.Next() {
key := iter.storage.Key()
if bytes.HasPrefix(key, rawdb.SnapshotStoragePrefix) && len(key) == (len(rawdb.SnapshotStoragePrefix)+2*common.HashLength) {
return utils.OpBatchAdd, key, iter.storage.Value(), true
}
}
return 0, nil, nil, false
}

func (iter *snapshotIterator) Release() {
iter.account.Release()
iter.storage.Release()
}
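
Both iterators satisfy the iterator contract consumed by the export helper in cmd/utils. Inferred from the methods above and the chainExporters map below, that contract looks roughly like this (the canonical definition is outside this diff and may differ):

```go
// Inferred shape of utils.ChainDataIterator; the real definition lives in
// cmd/utils and is not part of this file's changes.
type ChainDataIterator interface {
	// Next returns an operation code (utils.OpBatchAdd or utils.OpBatchDel),
	// the key/value pair for that operation, and whether an entry was produced.
	Next() (byte, []byte, []byte, bool)
	// Release frees the underlying database iterators.
	Release()
}
```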

// chainExporters defines the export scheme for all exportable chain data.
var chainExporters = map[string]func(db ethdb.Database) utils.ChainDataIterator{
"preimage": func(db ethdb.Database) utils.ChainDataIterator {
iter := db.NewIterator(rawdb.PreimagePrefix, nil)
return &preimageIterator{iter: iter}
},
"snapshot": func(db ethdb.Database) utils.ChainDataIterator {
account := db.NewIterator(rawdb.SnapshotAccountPrefix, nil)
storage := db.NewIterator(rawdb.SnapshotStoragePrefix, nil)
return &snapshotIterator{account: account, storage: storage}
},
}
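
Adding a new export kind only requires another map entry plus an iterator of the same shape. The sketch below is purely hypothetical: the "code" kind, the codeIterator type, and the use of rawdb.CodePrefix (the contract-code key prefix) are illustrations, not part of this change.

```go
// Hypothetical: export contract code (keyed by rawdb.CodePrefix + code hash)
// the same way preimages are exported. Enabling it would mean one extra
// entry in the chainExporters map above:
//
//	"code": func(db ethdb.Database) utils.ChainDataIterator {
//		return &codeIterator{iter: db.NewIterator(rawdb.CodePrefix, nil)}
//	},
type codeIterator struct {
	iter ethdb.Iterator
}

func (iter *codeIterator) Next() (byte, []byte, []byte, bool) {
	for iter.iter.Next() {
		key := iter.iter.Key()
		// Only emit well-formed code entries: prefix plus a 32-byte hash.
		if bytes.HasPrefix(key, rawdb.CodePrefix) && len(key) == (len(rawdb.CodePrefix)+common.HashLength) {
			return utils.OpBatchAdd, key, iter.iter.Value(), true
		}
	}
	return 0, nil, nil, false
}

func (iter *codeIterator) Release() {
	iter.iter.Release()
}
```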

func exportChaindata(ctx *cli.Context) error {
if ctx.NArg() < 2 {
return fmt.Errorf("required arguments: %v", ctx.Command.ArgsUsage)
}
// Parse the required chain data type, make sure it's supported.
kind := ctx.Args().Get(0)
kind = strings.ToLower(strings.Trim(kind, " "))
exporter, ok := chainExporters[kind]
if !ok {
var kinds []string
for kind := range chainExporters {
kinds = append(kinds, kind)
}
return fmt.Errorf("invalid data type %s, supported types: %s", kind, strings.Join(kinds, ", "))
}
var (
stack, _ = makeConfigNode(ctx)
interrupt = make(chan os.Signal, 1)
stop = make(chan struct{})
)
defer stack.Close()
signal.Notify(interrupt, syscall.SIGINT, syscall.SIGTERM)
defer signal.Stop(interrupt)
defer close(interrupt)
go func() {
if _, ok := <-interrupt; ok {
log.Info("Interrupted during db export, stopping at next batch")
}
close(stop)
}()
db := utils.MakeChainDatabase(ctx, stack, true)
return utils.ExportChaindata(ctx.Args().Get(1), kind, exporter(db), stop)
}
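
As on the import side, the writer is a cmd/utils helper not shown in this diff; its shape as inferred from the call above is roughly the following, with the alias name and details being assumptions:

```go
// Shape of utils.ExportChaindata, inferred from its call site above; the
// canonical declaration lives in cmd/utils and may differ in detail.
type exportChaindataFunc = func(fn string, kind string, iter utils.ChainDataIterator, interrupt chan struct{}) error
```

End to end, a dump written with "geth db export preimage preimages.rlp.gz" should be importable on another node with "geth db import preimages.rlp.gz", the optional trailing integer presumably selecting the record index at which a partially completed import resumes.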