Skip to content

Commit

Permalink
fix: update soft block limit to 2MiB
Browse files Browse the repository at this point in the history
This is needed because the 1MiB limit is the chunker limit; however, the
actual blocks can be wrapped in protobuf, which adds a small ~10-byte
overhead.
  • Loading branch information
Jorropo committed Jan 5, 2023
1 parent 94f8f89 commit 3dd2098
Show file tree
Hide file tree
Showing 6 changed files with 35 additions and 24 deletions.
10 changes: 5 additions & 5 deletions core/commands/cmdutils/utils.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,13 +11,13 @@ import (

const (
AllowBigBlockOptionName = "allow-big-block"
SoftBlockLimit = 1024 * 1024 // https://github.com/ipfs/kubo/issues/7421#issuecomment-910833499
SoftBlockLimit = 1024 * 1024 * 2 // https://github.com/web3-storage/web3.storage/pull/1269#issuecomment-1108834504
)

var AllowBigBlockOption cmds.Option

func init() {
AllowBigBlockOption = cmds.BoolOption(AllowBigBlockOptionName, "Disable block size check and allow creation of blocks bigger than 1MiB. WARNING: such blocks won't be transferable over the standard bitswap.").WithDefault(false)
AllowBigBlockOption = cmds.BoolOption(AllowBigBlockOptionName, "Disable block size check and allow creation of blocks bigger than 2MiB. WARNING: such blocks won't be transferable over the standard bitswap.").WithDefault(false)
}

func CheckCIDSize(req *cmds.Request, c cid.Cid, dagAPI coreiface.APIDagService) error {
Expand All @@ -40,11 +40,11 @@ func CheckBlockSize(req *cmds.Request, size uint64) error {
return nil
}

// We do not allow producing blocks bigger than 1 MiB to avoid errors
// when transmitting them over BitSwap. The 1 MiB constant is an
// We do not allow producing blocks bigger than 2MiB to avoid errors
// when transmitting them over BitSwap. The 2MiB constant is an
// unenforced and undeclared rule of thumb hard-coded here.
if size > SoftBlockLimit {
return fmt.Errorf("produced block is over 1MiB: big blocks can't be exchanged with other peers. consider using UnixFS for automatic chunking of bigger files, or pass --allow-big-block to override")
return fmt.Errorf("produced block is over 2MiB: big blocks can't be exchanged with other peers. consider using UnixFS for automatic chunking of bigger files, or pass --allow-big-block to override")
}
return nil

Expand Down
10 changes: 5 additions & 5 deletions test/sharness/t0050-block.sh
Original file line number Diff line number Diff line change
Expand Up @@ -291,17 +291,17 @@ test_expect_success "put with sha3 and cidv0 fails" '
'

test_expect_success "'ipfs block put' check block size" '
dd if=/dev/zero bs=2MB count=1 > 2-MB-file &&
test_expect_code 1 ipfs block put 2-MB-file >block_put_out 2>&1
dd if=/dev/zero bs=4MB count=1 > 4-MB-file &&
test_expect_code 1 ipfs block put 4-MB-file >block_put_out 2>&1
'

test_expect_success "ipfs block put output has the correct error" '
grep "produced block is over 1MiB" block_put_out
grep "produced block is over 2MiB" block_put_out
'

test_expect_success "ipfs block put --allow-big-block=true works" '
test_expect_code 0 ipfs block put 2-MB-file --allow-big-block=true &&
rm 2-MB-file
test_expect_code 0 ipfs block put 4-MB-file --allow-big-block=true &&
rm 4-MB-file
'

test_done
8 changes: 4 additions & 4 deletions test/sharness/t0051-object.sh
Original file line number Diff line number Diff line change
Expand Up @@ -225,16 +225,16 @@ test_object_cmd() {

test_expect_success "'ipfs object patch' check output block size" '
DIR=$(ipfs object new unixfs-dir)
for i in {1..13}
for i in {1..14}
do
DIR=$(ipfs object patch "$DIR" add-link "$DIR.jpg" "$DIR")
done
# Fail when new block goes over the BS limit of 1MiB, but allow manual override
# Fail when new block goes over the BS limit of 2MiB, but allow manual override
test_expect_code 1 ipfs object patch "$DIR" add-link "$DIR.jpg" "$DIR" >patch_out 2>&1
'

test_expect_success "ipfs object patch add-link output has the correct error" '
grep "produced block is over 1MiB" patch_out
grep "produced block is over 2MiB" patch_out
'

test_expect_success "ipfs object patch --allow-big-block=true add-link works" '
Expand Down Expand Up @@ -310,7 +310,7 @@ test_object_cmd() {
test_expect_success "'ipfs object stat --human' succeeds" '
ipfs object stat $(cat multi_patch)/a --human > obj_stat_human_out
'

test_expect_success "ipfs object stat --human output looks good" '
echo "NumLinks: 1" > obj_stat_human_exp &&
echo "BlockSize: 47" >> obj_stat_human_exp &&
Expand Down
10 changes: 5 additions & 5 deletions test/sharness/t0053-dag.sh
Original file line number Diff line number Diff line change
Expand Up @@ -45,17 +45,17 @@ test_dag_cmd() {
'

test_expect_success "'ipfs dag put' check block size" '
dd if=/dev/zero bs=2MB count=1 > 2-MB-file &&
test_expect_code 1 ipfs dag put --input-codec=raw --store-codec=raw 2-MB-file >dag_put_out 2>&1
dd if=/dev/zero bs=4MB count=1 > 4-MB-file &&
test_expect_code 1 ipfs dag put --input-codec=raw --store-codec=raw 4-MB-file >dag_put_out 2>&1
'

test_expect_success "ipfs dag put output has the correct error" '
grep "produced block is over 1MiB" dag_put_out
grep "produced block is over 2MiB" dag_put_out
'

test_expect_success "ipfs dag put --allow-big-block=true works" '
test_expect_code 0 ipfs dag put --input-codec=raw --store-codec=raw 2-MB-file --allow-big-block=true &&
rm 2-MB-file
test_expect_code 0 ipfs dag put --input-codec=raw --store-codec=raw 4-MB-file --allow-big-block=true &&
rm 4-MB-file
'

test_expect_success "can add an ipld object using dag-json to dag-json" '
Expand Down
10 changes: 5 additions & 5 deletions test/sharness/t0054-dag-car-import-export.sh
Original file line number Diff line number Diff line change
Expand Up @@ -234,16 +234,16 @@ test_expect_success "naked root import expected output" '
'

test_expect_success "'ipfs dag import' check block size" '
BIG_CID=$(dd if=/dev/zero bs=2MB count=1 | ipfs dag put --input-codec=raw --store-codec=raw --allow-big-block) &&
ipfs dag export $BIG_CID > 2-MB-block.car &&
test_expect_code 1 ipfs dag import 2-MB-block.car >dag_import_out 2>&1
BIG_CID=$(dd if=/dev/zero bs=4MB count=1 | ipfs dag put --input-codec=raw --store-codec=raw --allow-big-block) &&
ipfs dag export $BIG_CID > 4-MB-block.car &&
test_expect_code 1 ipfs dag import 4-MB-block.car >dag_import_out 2>&1
'
test_expect_success "ipfs dag import output has the correct error" '
grep "block is over 1MiB" dag_import_out
grep "block is over 2MiB" dag_import_out
'

test_expect_success "ipfs dag import --allow-big-block works" '
test_expect_code 0 ipfs dag import --allow-big-block 2-MB-block.car
test_expect_code 0 ipfs dag import --allow-big-block 4-MB-block.car
'

cat > version_2_import_expected << EOE
Expand Down
11 changes: 11 additions & 0 deletions test/sharness/t0125-twonode.sh
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,15 @@ run_single_file_test() {
check_file_fetch 0 $FILEA_HASH filea
}

run_2MiB_block_test() {
test_expect_success "add a file on node1" '
random $((1024*1024*2)) > filea &&
FILEA_HASH=$(ipfsi 1 block put filea)
'

check_file_fetch 0 $FILEA_HASH filea
}

run_random_dir_test() {
test_expect_success "create a bunch of random files" '
random-files -depth=3 -dirs=4 -files=5 -seed=5 foobar > /dev/null
Expand All @@ -62,6 +71,8 @@ run_advanced_test() {

run_single_file_test

run_2MiB_block_test

run_random_dir_test

test_expect_success "node0 data transferred looks correct" '
Expand Down

0 comments on commit 3dd2098

Please sign in to comment.