move clamp_basetype into make_setter
this avoids a double dereference.
charles-cooper committed Oct 3, 2021
1 parent f2ac16f commit 204bfa0
Showing 2 changed files with 19 additions and 12 deletions.
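The "double dereference" mentioned in the commit message comes from the old shape of the clamp in _abi_helper: clamp_basetype was applied through a pointer into the ABI buffer and the pointer itself was returned, so the consumer (ultimately make_setter) had to load the same slot again. Moving the clamp into make_setter lets it run on the value that unwrap_location has already loaded. A minimal sketch of the two shapes, using plain Python lists to stand in for LLL S-expressions (clamp-op, parent_ptr and dst are illustrative placeholders, not the exact output of clamp_basetype or the compiler):

    # Illustrative only: plain lists standing in for LLL nodes; "clamp-op" is a
    # placeholder for whatever sequence clamp_basetype emits for the type.
    ofst = ["add", "parent_ptr", 32]  # hypothetical pointer into an ABI-encoded buffer

    # Old shape (_abi_helper): clamp around the *pointer* and return the pointer.
    # The clamp loads the slot once, and the caller loads it again -> two mloads.
    old = ["with", "x", ofst, ["seq", ["clamp-op", ["mload", "x"]], "x"]]
    old_caller = ["mstore", "dst", ["mload", old]]

    # New shape (make_setter): unwrap_location loads the value once, then the
    # clamp and the store both reuse that value through the `with` binding.
    new = ["with", "val", ["mload", ofst], ["seq", ["clamp-op", "val"], "val"]]
    new_caller = ["mstore", "dst", new]

    print("old:", old_caller)
    print("new:", new_caller)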
vyper/old_codegen/abi.py (1 addition, 0 deletions)
@@ -498,6 +498,7 @@ def abi_encode(dst, lll_node, pos=None, bufsz=None, returns_len=False):
     return LLLnode.from_list(lll_ret, pos=pos, annotation=f"abi_encode {lll_node.typ}")
 
 
+# CMC 20211002 this is probably dead code because make_setter does this.
 # lll_node is the destination LLL item, src is the input buffer.
 # recursively copy the buffer items into lll_node, based on its type.
 # src: pointer to beginning of buffer
vyper/old_codegen/parser_utils.py (18 additions, 12 deletions)
@@ -317,17 +317,8 @@ def _abi_helper(member_t, ofst, clamp=True):
         ["ofst"], typ=member_t, location=parent.location, annotation=f"&({typ}->{member_t})"
     )
 
-    if clamp and _needs_clamp(member_t, parent.encoding):
-        # special handling for unsanitized external data that need clamping
-        # TODO optimize me. this results in a double dereference because
-        # it returns a pointer and not a value. probably the best thing
-        # is to move the clamp to make_setter
-        ret = ["with", x, ofst_lll, ["seq", clamp_basetype(x), x]]
-    else:
-        ret = ofst_lll
-
     return LLLnode.from_list(
-        ret,
+        ofst_lll,
         typ=member_t,
         location=parent.location,
         encoding=parent.encoding,
@@ -553,15 +544,29 @@ def make_setter(left, right, pos):
 
     # Basic types
     if isinstance(left.typ, BaseType):
+        enc = right.encoding  # unwrap_location butchers encoding
         right = unwrap_location(right)
+        # TODO rethink/streamline the clamp_basetype logic
+        if _needs_clamp(right.typ, enc):
+            _val = LLLnode("val", typ=right.typ)
+            right = ["with", _val, right, ["seq", clamp_basetype(_val), _val]]
+
         if left.location == "storage":
             return LLLnode.from_list(["sstore", left, right], typ=None)
         elif left.location == "memory":
             return LLLnode.from_list(["mstore", left, right], typ=None)
 
     # Byte arrays
     elif isinstance(left.typ, ByteArrayLike):
-        return make_byte_array_copier(left, right, pos)
+        # TODO rethink/streamline the clamp_basetype logic
+        if _needs_clamp(right.typ, right.encoding):
+            _val = LLLnode("val", location=right.location, typ=right.typ)
+            copier = make_byte_array_copier(left, _val, pos)
+            ret = ["with", _val, right, ["seq", clamp_basetype(_val), copier]]
+        else:
+            ret = make_byte_array_copier(left, right, pos)
+
+        return LLLnode.from_list(ret)
 
     # Arrays
     elif isinstance(left.typ, (ListType, TupleLike)):
@@ -728,7 +733,8 @@ def _sar(x, bits):
 
 
 def _needs_clamp(t, encoding):
-    assert encoding in (Encoding.ABI, Encoding.JSON_ABI)
+    if encoding not in (Encoding.ABI, Encoding.JSON_ABI):
+        return False
     if isinstance(t, ByteArrayLike):
         if encoding == Encoding.JSON_ABI:
             # don't have bytestring size bound from json, don't clamp
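With the clamp now issued from make_setter, _needs_clamp is reached for values of any encoding, so the final hunk swaps the assertion for an early return: non-ABI encodings describe data the compiler laid out itself, which is already known to be in range. A condensed, self-contained sketch of that entry check (the Encoding stand-in and the trailing return are simplifications; the real function continues with the type-specific cases shown in the diff):

    from enum import Enum, auto

    class Encoding(Enum):  # stand-in for vyper's Encoding enum; member names are illustrative
        VYPER = auto()
        ABI = auto()
        JSON_ABI = auto()

    def _needs_clamp_sketch(t, encoding):
        # Data with a non-ABI encoding was produced by the compiler itself and is
        # already in range, so it never needs a clamp.
        if encoding not in (Encoding.ABI, Encoding.JSON_ABI):
            return False
        # ... type-specific checks (bytestrings, base types, etc.) follow here;
        # simplified to a blanket True for this sketch.
        return True

    print(_needs_clamp_sketch(None, Encoding.VYPER))  # False: trusted internal data
    print(_needs_clamp_sketch(None, Encoding.ABI))    # True in this simplified sketch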
