diff --git a/core/shared/src/main/scala/sigma/SigmaDsl.scala b/core/shared/src/main/scala/sigma/SigmaDsl.scala index df2b419273..52c2f97bd7 100644 --- a/core/shared/src/main/scala/sigma/SigmaDsl.scala +++ b/core/shared/src/main/scala/sigma/SigmaDsl.scala @@ -459,6 +459,18 @@ trait Header { /** Miner votes for changing system parameters. */ def votes: Coll[Byte] //3 bytes + + /** Bytes which are coming from future versions of the protocol, so + * their meaning is not known to current version of Sigma, but they + * are stored to get the same id as future version users. + */ + def unparsedBytes: Coll[Byte] + + /** + * @return header bytes without proof of work, a PoW is generated over them + */ + def serializeWithoutPoW: Coll[Byte] + } /** Runtime representation of Context ErgoTree type. diff --git a/data/shared/src/main/scala/org/ergoplatform/ErgoHeader.scala b/data/shared/src/main/scala/org/ergoplatform/ErgoHeader.scala new file mode 100644 index 0000000000..2c6a40d46d --- /dev/null +++ b/data/shared/src/main/scala/org/ergoplatform/ErgoHeader.scala @@ -0,0 +1,182 @@ +package org.ergoplatform + +import scorex.crypto.authds.ADDigest +import scorex.crypto.hash.{Blake2b256, Digest32} +import scorex.util.ModifierId +import sigma.Colls +import sigma.crypto.{BigIntegers, CryptoConstants, EcPointType} +import sigma.serialization.{GroupElementSerializer, SigmaByteReader, SigmaByteWriter, SigmaSerializer} + +import scala.runtime.ScalaRunTime +import scala.util.hashing.MurmurHash3 + + + +/** + * Solution for an Autolykos PoW puzzle. + * + * In Autolykos v.1 all the four fields are used, in Autolykos v.2 only pk and n fields are used. + * + * @param pk - miner public key. Should be used to collect block rewards + * @param w - one-time public key. Prevents revealing of miners secret + * @param n - nonce (8 bytes) + * @param d - distance between pseudo-random number, corresponding to nonce `n` and a secret, + * corresponding to `pk`. The lower `d` is, the harder it was to find this solution. 
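+ *
+ * Illustrative sketch (editor's note, not part of this change set): the v2 serializer
+ * defined below writes only `pk` and `n`, so a round trip through it restores the
+ * remaining fields from the fixed constants, e.g. for some value `solution: AutolykosSolution`:
+ * {{{
+ *   val bytes  = AutolykosSolution.sigmaSerializerV2.toBytes(solution)
+ *   val parsed = AutolykosSolution.sigmaSerializerV2.parse(SigmaSerializer.startReader(bytes))
+ *   // parsed.w == AutolykosSolution.wForV2 && parsed.d == AutolykosSolution.dForV2
+ * }}}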
+ */ +class AutolykosSolution(val pk: EcPointType, + val w: EcPointType, + val n: Array[Byte], + val d: BigInt) { + + val encodedPk: Array[Byte] = GroupElementSerializer.toBytes(pk) + + override def hashCode(): Int = { + var h = pk.hashCode() + h = h * 31 + w.hashCode() + h = h * 31 + MurmurHash3.arrayHash(n) + h = h * 31 + d.hashCode() + h + } + + override def equals(obj: Any): Boolean = { + obj match { + case other: AutolykosSolution => + this.pk == other.pk && + this.n.sameElements(other.n) && + this.w == other.w && + this.d == other.d + + case _ => false + } + } +} + + +object AutolykosSolution { + // "pk", "w" and "d" values for Autolykos v2 solution, where they not passed from outside + val pkForV2: EcPointType = CryptoConstants.dlogGroup.identity + val wForV2: EcPointType = CryptoConstants.dlogGroup.generator + val dForV2: BigInt = 0 + + object sigmaSerializerV1 extends SigmaSerializer[AutolykosSolution, AutolykosSolution] { + override def serialize(s: AutolykosSolution, w: SigmaByteWriter): Unit = { + GroupElementSerializer.serialize(s.pk, w) + GroupElementSerializer.serialize(s.w, w) + require(s.n.length == 8) // non-consensus check on prover side + w.putBytes(s.n) + val dBytes = BigIntegers.asUnsignedByteArray(s.d.bigInteger) + w.putUByte(dBytes.length) + w.putBytes(dBytes) + } + + override def parse(r: SigmaByteReader): AutolykosSolution = { + val pk = GroupElementSerializer.parse(r) + val w = GroupElementSerializer.parse(r) + val nonce = r.getBytes(8) + val dBytesLength = r.getUByte() + val d = BigInt(BigIntegers.fromUnsignedByteArray(r.getBytes(dBytesLength))) + new AutolykosSolution(pk, w, nonce, d) + } + } + + object sigmaSerializerV2 extends SigmaSerializer[AutolykosSolution, AutolykosSolution] { + override def serialize(s: AutolykosSolution, w: SigmaByteWriter): Unit = { + GroupElementSerializer.serialize(s.pk, w) + require(s.n.length == 8) // non-consensus check on prover side + w.putBytes(s.n) + } + + override def parse(r: SigmaByteReader): AutolykosSolution = { + val pk = GroupElementSerializer.parse(r) + val nonce = r.getBytes(8) + new AutolykosSolution(pk, wForV2, nonce, dForV2) + } + } +} + +/** + * Header of a block. It authenticates link to a previous block, other block sections + * (transactions, UTXO set transformation proofs, extension), UTXO set, votes for parameters + * to be changed and proof-of-work related data. + * + * @param version - protocol version + * @param parentId - id of a parent block header + * @param ADProofsRoot - digest of UTXO set transformation proofs + * @param stateRoot - AVL+ tree digest of UTXO set (after the block) + * @param transactionsRoot - Merkle tree digest of transactions in the block (BlockTransactions section) + * @param timestamp - block generation time reported by a miner + * @param nBits - difficulty encoded + * @param height - height of the block (genesis block height == 1) + * @param extensionRoot - Merkle tree digest of the extension section of the block + * @param powSolution - solution for the proof-of-work puzzle + * @param votes - votes for changing system parameters + * @param unparsedBytes - bytes from future versions of the protocol our version can't parse + * @param _bytes - serialized bytes of the header when not `null` + */ +case class ErgoHeader(override val version: ErgoHeader.Version, + override val parentId: ModifierId, + override val ADProofsRoot: Digest32, + override val stateRoot: ADDigest, //33 bytes! extra byte with tree height here! 
+ override val transactionsRoot: Digest32, + override val timestamp: ErgoHeader.Timestamp, + override val nBits: Long, //actually it is unsigned int + override val height: Int, + override val extensionRoot: Digest32, + powSolution: AutolykosSolution, + override val votes: Array[Byte], //3 bytes + override val unparsedBytes: Array[Byte], + _bytes: Array[Byte]) extends + HeaderWithoutPow(version, parentId, ADProofsRoot, stateRoot, transactionsRoot, timestamp, + nBits, height, extensionRoot, votes, unparsedBytes) { + + lazy val bytes = if(_bytes != null) { + _bytes + } else { + ErgoHeader.sigmaSerializer.toBytes(this) + } + + lazy val serializedId: Array[Byte] = Blake2b256.hash(bytes) + + lazy val id = Colls.fromArray(serializedId) + + override def hashCode(): Int = id.hashCode() + + override def equals(other: Any): Boolean = other match { + case h: ErgoHeader => h.id == this.id + case _ => false + } +} + + +object ErgoHeader { + + type Timestamp = Long + + type Version = Byte + + object sigmaSerializer extends SigmaSerializer[ErgoHeader, ErgoHeader] { + override def serialize(hdr: ErgoHeader, w: SigmaByteWriter): Unit = { + HeaderWithoutPowSerializer.serialize(hdr, w) + if (hdr.version == 1) { + AutolykosSolution.sigmaSerializerV1.serialize(hdr.powSolution, w) + } else { + AutolykosSolution.sigmaSerializerV2.serialize(hdr.powSolution, w) + } + } + + override def parse(r: SigmaByteReader): ErgoHeader = { + val start = r.position + val headerWithoutPow = HeaderWithoutPowSerializer.parse(r) + val powSolution = if (headerWithoutPow.version == 1) { + AutolykosSolution.sigmaSerializerV1.parse(r) + } else { + AutolykosSolution.sigmaSerializerV2.parse(r) + } + val end = r.position + val len = end - start + r.position = start + val headerBytes = r.getBytes(len) // also moves position back to end + headerWithoutPow.toHeader(powSolution, headerBytes) + } + } +} \ No newline at end of file diff --git a/data/shared/src/main/scala/org/ergoplatform/HeaderWithoutPow.scala b/data/shared/src/main/scala/org/ergoplatform/HeaderWithoutPow.scala new file mode 100644 index 0000000000..4eba9b708e --- /dev/null +++ b/data/shared/src/main/scala/org/ergoplatform/HeaderWithoutPow.scala @@ -0,0 +1,139 @@ +package org.ergoplatform + +import scorex.crypto.authds.ADDigest +import scorex.crypto.hash.Digest32 +import scorex.util.{ModifierId, bytesToId, idToBytes} +import sigma.serialization.{SigmaByteReader, SigmaByteWriter, SigmaSerializer} +import scorex.util.Extensions._ + +/** + * Header without proof-of-work puzzle solution, see Header class description for details. + */ +class HeaderWithoutPow(val version: Byte, // 1 byte + val parentId: ModifierId, // 32 bytes + val ADProofsRoot: Digest32, // 32 bytes + val stateRoot: ADDigest, //33 bytes! extra byte with tree height here! 
+ val transactionsRoot: Digest32, // 32 bytes + val timestamp: Long, + val nBits: Long, //actually it is unsigned int + val height: Int, + val extensionRoot: Digest32, + val votes: Array[Byte], //3 bytes + val unparsedBytes: Array[Byte]) { + def toHeader(powSolution: AutolykosSolution, bytes: Array[Byte]): ErgoHeader = + ErgoHeader(version, parentId, ADProofsRoot, stateRoot, transactionsRoot, timestamp, + nBits, height, extensionRoot, powSolution, votes, unparsedBytes, bytes) +} + +object HeaderWithoutPow { + + def apply(version: Byte, parentId: ModifierId, ADProofsRoot: Digest32, stateRoot: ADDigest, + transactionsRoot: Digest32, timestamp: Long, nBits: Long, height: Int, + extensionRoot: Digest32, votes: Array[Byte], unparsedBytes: Array[Byte]): HeaderWithoutPow = { + new HeaderWithoutPow(version, parentId, ADProofsRoot, stateRoot, transactionsRoot, timestamp, + nBits, height, extensionRoot, votes, unparsedBytes) + } + +} + +object HeaderWithoutPowSerializer extends SigmaSerializer[HeaderWithoutPow, HeaderWithoutPow] { + + override def serialize(h: HeaderWithoutPow, w: SigmaByteWriter): Unit = { + w.put(h.version) + w.putBytes(idToBytes(h.parentId)) + w.putBytes(h.ADProofsRoot) + w.putBytes(h.transactionsRoot) + w.putBytes(h.stateRoot) + w.putULong(h.timestamp) + w.putBytes(h.extensionRoot) + DifficultySerializer.serialize(h.nBits, w) + w.putUInt(h.height.toLong) + w.putBytes(h.votes) + + // For block version >= 2, this new byte encodes length of possible new fields. + // Set to 0 for now, so no new fields. + if (h.version > HeaderVersion.InitialVersion) { + w.putUByte(h.unparsedBytes.length) + w.putBytes(h.unparsedBytes) + } + } + + override def parse(r: SigmaByteReader): HeaderWithoutPow = { + val version = r.getByte() + val parentId = bytesToId(r.getBytes(32)) + val ADProofsRoot = Digest32 @@ r.getBytes(32) + val transactionsRoot = Digest32 @@ r.getBytes(32) + val stateRoot = ADDigest @@ r.getBytes(33) + val timestamp = r.getULong() + val extensionHash = Digest32 @@ r.getBytes(32) + val nBits = DifficultySerializer.parse(r) + val height = r.getUInt().toIntExact + val votes = r.getBytes(3) + + // For block version >= 2, a new byte encodes length of possible new fields. + // If this byte > 0, we read new fields but do nothing, as semantics of the fields is not known. + val unparsedBytes = if (version > HeaderVersion.InitialVersion) { + val newFieldsSize = r.getUByte() + if (newFieldsSize > 0 && version > HeaderVersion.Interpreter60Version) { + // new bytes could be added only for block version >= 5 + r.getBytes(newFieldsSize) + } else { + Array.emptyByteArray + } + } else { + Array.emptyByteArray + } + + HeaderWithoutPow(version, parentId, ADProofsRoot, stateRoot, transactionsRoot, timestamp, + nBits, height, extensionHash, votes, unparsedBytes) + } + +} + + +object DifficultySerializer extends SigmaSerializer[Long, Long] { + + /** Parse 4 bytes from the byte array (starting at the offset) as unsigned 32-bit integer in big endian format. 
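+ * For example (editor's illustration): readUint32BE(Array[Byte](0x00, 0x01, 0x00, 0x00)) == 65536L,
+ * and uint32ToByteArrayBE(65536L) returns the same four bytes; here the whole 4-byte array is read,
+ * i.e. the offset is always 0.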
*/ + def readUint32BE(bytes: Array[Byte]): Long = ((bytes(0) & 0xffL) << 24) | ((bytes(1) & 0xffL) << 16) | ((bytes(2) & 0xffL) << 8) | (bytes(3) & 0xffL) + + def uint32ToByteArrayBE(value: Long): Array[Byte] = { + Array(0xFF & (value >> 24), 0xFF & (value >> 16), 0xFF & (value >> 8), 0xFF & value).map(_.toByte) + } + + override def serialize(obj: Long, w: SigmaByteWriter): Unit = { + w.putBytes(uint32ToByteArrayBE(obj)) + } + + override def parse(r: SigmaByteReader): Long = { + readUint32BE(r.getBytes(4)) + } + +} + +object HeaderVersion { + type Value = Byte + + /** + * Block version during mainnet launch + */ + val InitialVersion: Value = 1.toByte + + /** + * Block version after the Hardening hard-fork + * Autolykos v2 PoW, witnesses in transactions Merkle tree + */ + val HardeningVersion: Value = 2.toByte + + /** + * Block version after the 5.0 soft-fork + * 5.0 interpreter with JITC, monotonic height rule (EIP-39) + */ + val Interpreter50Version: Value = 3.toByte + + /** + * Block version after the 6.0 soft-fork + * 6.0 interpreter (EIP-50) + */ + val Interpreter60Version: Value = 4.toByte + +} diff --git a/data/shared/src/main/scala/sigma/data/CHeader.scala b/data/shared/src/main/scala/sigma/data/CHeader.scala new file mode 100644 index 0000000000..d0b8db6173 --- /dev/null +++ b/data/shared/src/main/scala/sigma/data/CHeader.scala @@ -0,0 +1,140 @@ +package sigma.data + +import org.ergoplatform.{AutolykosSolution, ErgoHeader, HeaderWithoutPow, HeaderWithoutPowSerializer} +import scorex.crypto.authds.ADDigest +import scorex.crypto.hash.Digest32 +import scorex.util.{bytesToId, idToBytes} +import sigma.{AvlTree, BigInt, Coll, Colls, GroupElement, Header} + +/** A default implementation of [[Header]] interface. + * + * @see [[Header]] for detailed descriptions + */ +class CHeader(val ergoHeader: ErgoHeader) extends Header with WrapperOf[ErgoHeader] { + + /** Bytes representation of ModifierId of this Header */ + override lazy val id: Coll[Byte] = ergoHeader.id + + /** Block version, to be increased on every soft and hardfork. */ + override def version: Byte = ergoHeader.version + + /** Bytes representation of ModifierId of the parent block */ + override def parentId: Coll[Byte] = Colls.fromArray(idToBytes(ergoHeader.parentId)) + + /** Hash of ADProofs for transactions in a block */ + override def ADProofsRoot: Coll[Byte] = Colls.fromArray(ergoHeader.ADProofsRoot) + + /** AvlTree of a state after block application */ + override def stateRoot: AvlTree = CAvlTree(AvlTreeData.avlTreeFromDigest(Colls.fromArray(ergoHeader.stateRoot))) + + /** Root hash (for a Merkle tree) of transactions in a block. */ + override def transactionsRoot: Coll[Byte] = Colls.fromArray(ergoHeader.transactionsRoot) + + /** Block timestamp (in milliseconds since beginning of Unix Epoch) */ + override def timestamp: Long = ergoHeader.timestamp + + /** Current difficulty in a compressed view. + * NOTE: actually it is unsigned Int */ + override def nBits: Long = ergoHeader.nBits + + /** Block height */ + override def height: Int = ergoHeader.height + + /** Root hash of extension section */ + override def extensionRoot: Coll[Byte] = Colls.fromArray(ergoHeader.extensionRoot) + + /** Miner public key. Should be used to collect block rewards. + * Part of Autolykos solution. */ + override def minerPk: GroupElement = CGroupElement(ergoHeader.powSolution.pk) + + /** One-time public key. Prevents revealing of miners secret. 
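+ * For Autolykos v2 headers (block version >= 2) no one-time key is serialized, so
+ * deserialized headers expose the fixed `AutolykosSolution.wForV2` here (editor's note).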
*/ + override def powOnetimePk: GroupElement = CGroupElement(ergoHeader.powSolution.w) + + /** nonce */ + override def powNonce: Coll[Byte] = Colls.fromArray(ergoHeader.powSolution.n) + + /** Distance between pseudo-random number, corresponding to nonce `powNonce` and a secret, + * corresponding to `minerPk`. The lower `powDistance` is, the harder it was to find this solution. */ + override def powDistance: BigInt = CBigInt(ergoHeader.powSolution.d.bigInteger) + + /** Miner votes for changing system parameters. */ + override def votes: Coll[Byte] = Colls.fromArray(ergoHeader.votes) + + override def unparsedBytes: Coll[Byte] = Colls.fromArray(ergoHeader.unparsedBytes) + + /** The data value wrapped by this wrapper. */ + override def wrappedValue: ErgoHeader = ergoHeader + + override def serializeWithoutPoW: Coll[Byte] = { + val headerWithoutPow = HeaderWithoutPow(version, bytesToId(parentId.toArray), Digest32 @@ ADProofsRoot.toArray, + ADDigest @@ stateRoot.digest.toArray, Digest32 @@ transactionsRoot.toArray, timestamp, + nBits, height, Digest32 @@ extensionRoot.toArray, votes.toArray, unparsedBytes.toArray) + Colls.fromArray(HeaderWithoutPowSerializer.toBytes(headerWithoutPow)) + } + + override def toString: String = + s"""CHeader( + | id: ${id}, + | version: ${version}, + | tx proofs hash: ${ADProofsRoot}, + | state root: ${stateRoot.digest}, + | transactions root: ${transactionsRoot}, + | time: $timestamp, + | nbits: $nBits, + | extension root: $extensionRoot, + | miner pubkey: $minerPk, + | pow one time pubkey(from AL 1): $powOnetimePk, + | pow nonce: $powNonce, + | pow distance (from AL 1): $powDistance, + | votes: $votes, + | unparsed bytes: $unparsedBytes + |)""".stripMargin + + override def hashCode(): Int = id.hashCode() + + override def equals(other: Any): Boolean = other match { + case ch: CHeader => ch.id == this.id + case _ => false + } + + def copy(): CHeader = new CHeader(ergoHeader.copy()) // used in tests only +} + +object CHeader { + + def apply( version: Byte, + parentId: Coll[Byte], + ADProofsRoot: Coll[Byte], + stateRootDigest: Coll[Byte], + transactionsRoot: Coll[Byte], + timestamp: Long, + nBits: Long, + height: Int, + extensionRoot: Coll[Byte], + minerPk: GroupElement, + powOnetimePk: GroupElement, + powNonce: Coll[Byte], + powDistance: BigInt, + votes: Coll[Byte], + unparsedBytes: Coll[Byte]): CHeader = { + + val solution = new AutolykosSolution( + minerPk.asInstanceOf[CGroupElement].wrappedValue, + powOnetimePk.asInstanceOf[CGroupElement].wrappedValue, + powNonce.toArray, + powDistance.asInstanceOf[CBigInt].wrappedValue) + + val h = ErgoHeader(version, bytesToId(parentId.toArray), Digest32 @@ ADProofsRoot.toArray, + ADDigest @@ stateRootDigest.toArray, Digest32 @@ transactionsRoot.toArray, timestamp, nBits, height, + Digest32 @@ extensionRoot.toArray, solution, votes.toArray, unparsedBytes.toArray, null) + + new CHeader(h) + } + + /** Size of of Header.votes array. */ + val VotesSize: Int = SigmaConstants.VotesArraySize.value + + /** Size of nonce array from Autolykos POW solution in Header.powNonce array. 
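+ * The nonce is 8 bytes (see `AutolykosSolution`), matching the `require(s.n.length == 8)`
+ * checks in its serializers (editor's note).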
*/ + val NonceSize: Int = SigmaConstants.AutolykosPowSolutionNonceArraySize.value + +} diff --git a/data/shared/src/main/scala/sigma/serialization/DataSerializer.scala b/data/shared/src/main/scala/sigma/serialization/DataSerializer.scala index 5f554e96a1..92a54f9aa4 100644 --- a/data/shared/src/main/scala/sigma/serialization/DataSerializer.scala +++ b/data/shared/src/main/scala/sigma/serialization/DataSerializer.scala @@ -1,8 +1,9 @@ package sigma.serialization -import org.ergoplatform.ErgoBox +import org.ergoplatform.{ErgoBox, ErgoHeader} +import sigma.VersionContext import sigma.ast._ -import sigma.data.CBox +import sigma.data.{CBox, CHeader} /** This works in tandem with ConstantSerializer, if you change one make sure to check the other.*/ object DataSerializer extends CoreDataSerializer { @@ -15,6 +16,9 @@ object DataSerializer extends CoreDataSerializer { case SBox => val b = v.asInstanceOf[CBox] ErgoBox.sigmaSerializer.serialize(b.ebox, w.asInstanceOf[SigmaByteWriter]) + case SHeader if VersionContext.current.isV6SoftForkActivated => + val h = v.asInstanceOf[CHeader] + ErgoHeader.sigmaSerializer.serialize(h.ergoHeader, w.asInstanceOf[SigmaByteWriter]) case _ => super.serialize(v, tpe, w) } @@ -32,6 +36,12 @@ object DataSerializer extends CoreDataSerializer { val res = CBox(ErgoBox.sigmaSerializer.parse(r.asInstanceOf[SigmaByteReader])) r.level = r.level - 1 res + case SHeader if VersionContext.current.isV6SoftForkActivated => + val depth = r.level + r.level = depth + 1 + val res = new CHeader(ErgoHeader.sigmaSerializer.parse(r.asInstanceOf[SigmaByteReader])) + r.level = r.level - 1 + res case t => super.deserialize(t, r) }).asInstanceOf[T#WrappedType] diff --git a/interpreter/shared/src/main/scala/sigmastate/eval/CHeader.scala b/interpreter/shared/src/main/scala/sigmastate/eval/CHeader.scala deleted file mode 100644 index 7062fa0f0e..0000000000 --- a/interpreter/shared/src/main/scala/sigmastate/eval/CHeader.scala +++ /dev/null @@ -1,34 +0,0 @@ -package sigmastate.eval - -import sigma.data.SigmaConstants -import sigma.{AvlTree, BigInt, Coll, GroupElement, Header} - -/** A default implementation of [[Header]] interface. - * - * @see [[Header]] for detailed descriptions - */ -case class CHeader( - id: Coll[Byte], - version: Byte, - parentId: Coll[Byte], - ADProofsRoot: Coll[Byte], - stateRoot: AvlTree, - transactionsRoot: Coll[Byte], - timestamp: Long, - nBits: Long, - height: Int, - extensionRoot: Coll[Byte], - minerPk: GroupElement, - powOnetimePk: GroupElement, - powNonce: Coll[Byte], - powDistance: BigInt, - votes: Coll[Byte] -) extends Header - -object CHeader { - /** Size of of Header.votes array. */ - val VotesSize: Int = SigmaConstants.VotesArraySize.value - - /** Size of nonce array from Autolykos POW solution in Header.powNonce array. 
*/ - val NonceSize: Int = SigmaConstants.AutolykosPowSolutionNonceArraySize.value -} \ No newline at end of file diff --git a/interpreter/shared/src/main/scala/sigmastate/interpreter/CErgoTreeEvaluator.scala b/interpreter/shared/src/main/scala/sigmastate/interpreter/CErgoTreeEvaluator.scala index de8aa6b620..2f7d527e74 100644 --- a/interpreter/shared/src/main/scala/sigmastate/interpreter/CErgoTreeEvaluator.scala +++ b/interpreter/shared/src/main/scala/sigmastate/interpreter/CErgoTreeEvaluator.scala @@ -5,7 +5,7 @@ import sigma.ast._ import sigma.ast.syntax._ import sigmastate.eval.{CAvlTreeVerifier, CProfiler} import sigmastate.interpreter.Interpreter.ReductionResult -import sigma.{AvlTree, Coll, Colls, Context, VersionContext} +import sigma.{AvlTree, Coll, Colls, Context, Header, VersionContext} import sigma.util.Extensions._ import debox.{cfor, Buffer => DBuffer} import scorex.crypto.authds.ADKey diff --git a/interpreter/shared/src/test/scala/sigma/serialization/ConstantSerializerSpecification.scala b/interpreter/shared/src/test/scala/sigma/serialization/ConstantSerializerSpecification.scala index 43e9cf9e5d..c9478c8356 100644 --- a/interpreter/shared/src/test/scala/sigma/serialization/ConstantSerializerSpecification.scala +++ b/interpreter/shared/src/test/scala/sigma/serialization/ConstantSerializerSpecification.scala @@ -9,7 +9,7 @@ import sigma.ast.{BigIntConstant, ByteArrayConstant, Constant, DeserializationSi import sigmastate.eval._ import sigma.Extensions.ArrayOps import sigma.ast._ -import sigma.{AvlTree, Colls, Evaluation} +import sigma.{AvlTree, Colls, Evaluation, Header, VersionContext} import sigma.ast.SType.AnyOps import scorex.util.encode.Base16 import sigma.ast.BoolArrayConstant.BoolArrayTypeCode @@ -17,6 +17,7 @@ import sigma.ast.ByteArrayConstant.ByteArrayTypeCode import sigma.ast.syntax.{BoolValue, SValue} import sigma.crypto.EcPointType import sigma.util.Extensions.{BigIntegerOps, EcpOps, SigmaBooleanOps} + import scala.annotation.nowarn class ConstantSerializerSpecification extends TableSerializationSpecification { @@ -25,22 +26,29 @@ class ConstantSerializerSpecification extends TableSerializationSpecification { implicit val wWrapped = wrappedTypeGen(tpe) implicit val tT = Evaluation.stypeToRType(tpe) implicit val tag = tT.classTag + + val withVersion = if (tpe == SHeader) { + Some(VersionContext.V6SoftForkVersion) + } else { + None + } + forAll { xs: Array[T#WrappedType] => implicit val tAny = sigma.AnyType - roundTripTest(Constant[SCollection[T]](xs.toColl, SCollection(tpe))) - roundTripTest(Constant[SType](xs.toColl.map(x => (x, x)).asWrappedType, SCollection(STuple(tpe, tpe)))) // pairs are special case + roundTripTest(Constant[SCollection[T]](xs.toColl, SCollection(tpe)), withVersion) + roundTripTest(Constant[SType](xs.toColl.map(x => (x, x)).asWrappedType, SCollection(STuple(tpe, tpe))), withVersion) // pairs are special case val triples = xs.toColl.map(x => TupleColl(x, x, x)).asWrappedType - roundTripTest(Constant[SType](triples, SCollection(STuple(tpe, tpe, tpe)))) + roundTripTest(Constant[SType](triples, SCollection(STuple(tpe, tpe, tpe))), withVersion) val quartets = xs.toColl.map(x => TupleColl(x, x, x, x)).asWrappedType - roundTripTest(Constant[SType](quartets, SCollection(STuple(tpe, tpe, tpe, tpe)))) - roundTripTest(Constant[SCollection[SCollection[T]]](xs.toColl.map(x => Colls.fromItems(x, x)), SCollection(SCollection(tpe)))) + roundTripTest(Constant[SType](quartets, SCollection(STuple(tpe, tpe, tpe, tpe))), withVersion) + 
roundTripTest(Constant[SCollection[SCollection[T]]](xs.toColl.map(x => Colls.fromItems(x, x)), SCollection(SCollection(tpe))), withVersion) roundTripTest(Constant[SType]( xs.toColl.map { x => val arr = Colls.fromItems(x, x) (arr, arr) }.asWrappedType, SCollection(STuple(SCollection(tpe), SCollection(tpe))) - )) + ), withVersion) } } @@ -49,14 +57,19 @@ class ConstantSerializerSpecification extends TableSerializationSpecification { implicit val tT = Evaluation.stypeToRType(tpe) @nowarn implicit val tag = tT.classTag implicit val tAny: RType[Any] = sigma.AnyType + val withVersion = if (tpe == SHeader) { + Some(VersionContext.V6SoftForkVersion) + } else { + None + } forAll { in: (T#WrappedType, T#WrappedType) => val (x,y) = (in._1, in._2) - roundTripTest(Constant[SType]((x, y).asWrappedType, STuple(tpe, tpe))) - roundTripTest(Constant[SType](TupleColl(x, y, x).asWrappedType, STuple(tpe, tpe, tpe))) - roundTripTest(Constant[SType](TupleColl(x, y, x, y).asWrappedType, STuple(tpe, tpe, tpe, tpe))) - roundTripTest(Constant[STuple](Colls.fromItems[Any](x, y, (x, y)), STuple(tpe, tpe, STuple(tpe, tpe)))) - roundTripTest(Constant[STuple](Colls.fromItems[Any](x, y, TupleColl(x, y, x)), STuple(tpe, tpe, STuple(tpe, tpe, tpe)))) - roundTripTest(Constant[STuple](Colls.fromItems[Any](x, y, TupleColl(x, y, (x, y))), STuple(tpe, tpe, STuple(tpe, tpe, STuple(tpe, tpe))))) + roundTripTest(Constant[SType]((x, y).asWrappedType, STuple(tpe, tpe)), withVersion) + roundTripTest(Constant[SType](TupleColl(x, y, x).asWrappedType, STuple(tpe, tpe, tpe)), withVersion) + roundTripTest(Constant[SType](TupleColl(x, y, x, y).asWrappedType, STuple(tpe, tpe, tpe, tpe)), withVersion) + roundTripTest(Constant[STuple](Colls.fromItems[Any](x, y, (x, y)), STuple(tpe, tpe, STuple(tpe, tpe))), withVersion) + roundTripTest(Constant[STuple](Colls.fromItems[Any](x, y, TupleColl(x, y, x)), STuple(tpe, tpe, STuple(tpe, tpe, tpe))), withVersion) + roundTripTest(Constant[STuple](Colls.fromItems[Any](x, y, TupleColl(x, y, (x, y))), STuple(tpe, tpe, STuple(tpe, tpe, STuple(tpe, tpe)))), withVersion) } } @@ -71,6 +84,7 @@ class ConstantSerializerSpecification extends TableSerializationSpecification { forAll { x: SigmaBoolean => roundTripTest(Constant[SSigmaProp.type](x.toSigmaProp, SSigmaProp)) } forAll { x: ErgoBox => roundTripTest(Constant[SBox.type](x, SBox)) } forAll { x: AvlTree => roundTripTest(Constant[SAvlTree.type](x, SAvlTree)) } + forAll { x: Header => roundTripTest(Constant[SHeader.type](x, SHeader), Some(VersionContext.V6SoftForkVersion)) } forAll { x: Array[Byte] => roundTripTest(Constant[SByteArray](x.toColl, SByteArray)) } forAll { t: SPredefType => testCollection(t) } forAll { t: SPredefType => testTuples(t) } @@ -88,6 +102,7 @@ class ConstantSerializerSpecification extends TableSerializationSpecification { testCollection(SUnit) testCollection(SBox) testCollection(SAvlTree) + testCollection(SHeader) } private def caseObjectValue(v: SValue) = (v, Array[Byte](v.opCode)) diff --git a/interpreter/shared/src/test/scala/sigma/serialization/DataSerializerSpecification.scala b/interpreter/shared/src/test/scala/sigma/serialization/DataSerializerSpecification.scala index 2d9da3a87e..fe6f62dbe0 100644 --- a/interpreter/shared/src/test/scala/sigma/serialization/DataSerializerSpecification.scala +++ b/interpreter/shared/src/test/scala/sigma/serialization/DataSerializerSpecification.scala @@ -3,10 +3,10 @@ package sigma.serialization import java.math.BigInteger import org.ergoplatform.ErgoBox import org.scalacheck.Arbitrary._ -import 
sigma.data.{DataValueComparer, OptionType, RType, SigmaBoolean, TupleColl} +import sigma.data.{CBigInt, CHeader, DataValueComparer, OptionType, RType, SigmaBoolean, TupleColl} import sigma.ast.SCollection.SByteArray import sigmastate.eval._ -import sigma.{AvlTree, Colls, Evaluation, VersionContext} +import sigma.{AvlTree, Colls, Evaluation, Header, VersionContext} import sigma.ast.SType.AnyOps import sigma.ast._ import org.scalacheck.Gen @@ -23,29 +23,41 @@ import scala.reflect.ClassTag class DataSerializerSpecification extends SerializationSpecification { - def roundtrip[T <: SType](obj: T#WrappedType, tpe: T) = { - val w = SigmaSerializer.startWriter() - DataSerializer.serialize(obj, tpe, w) - val bytes = w.toBytes - val r = SigmaSerializer.startReader(bytes) - val res = DataSerializer.deserialize(tpe, r) - res shouldBe obj - - val es = CErgoTreeEvaluator.DefaultEvalSettings - val accumulator = new CostAccumulator( - initialCost = JitCost(0), - costLimit = Some(JitCost.fromBlockCost(es.scriptCostLimitInEvaluator))) - val evaluator = new CErgoTreeEvaluator( - context = null, - constants = ErgoTree.EmptyConstants, - coster = accumulator, DefaultProfiler, es) - val ok = DataValueComparer.equalDataValues(res, obj)(evaluator) - ok shouldBe true - - val randomPrefix = arrayGen[Byte].sample.get - val r2 = SigmaSerializer.startReader(randomPrefix ++ bytes, randomPrefix.length) - val res2 = DataSerializer.deserialize(tpe, r2) - res2 shouldBe obj + def roundtrip[T <: SType](obj: T#WrappedType, tpe: T, withVersion: Option[Byte] = None) = { + + def test() = { + val w = SigmaSerializer.startWriter() + DataSerializer.serialize(obj, tpe, w) + val bytes = w.toBytes + val r = SigmaSerializer.startReader(bytes) + val res = DataSerializer.deserialize(tpe, r) + res shouldBe obj + + val es = CErgoTreeEvaluator.DefaultEvalSettings + val accumulator = new CostAccumulator( + initialCost = JitCost(0), + costLimit = Some(JitCost.fromBlockCost(es.scriptCostLimitInEvaluator))) + val evaluator = new CErgoTreeEvaluator( + context = null, + constants = ErgoTree.EmptyConstants, + coster = accumulator, DefaultProfiler, es) + val ok = DataValueComparer.equalDataValues(res, obj)(evaluator) + ok shouldBe true + + val randomPrefix = arrayGen[Byte].sample.get + val r2 = SigmaSerializer.startReader(randomPrefix ++ bytes, randomPrefix.length) + val res2 = DataSerializer.deserialize(tpe, r2) + res2 shouldBe obj + } + + withVersion match { + case Some(ver) => + VersionContext.withVersions(ver, 1) { + test() + } + case None => + test() + } } def testCollection[T <: SType](tpe: T) = { @@ -53,25 +65,32 @@ class DataSerializerSpecification extends SerializationSpecification { implicit val tT = Evaluation.stypeToRType(tpe) implicit val tagT = tT.classTag implicit val tAny = sigma.AnyType + + val withVersion = if (tpe == SHeader) { + Some(VersionContext.V6SoftForkVersion) + } else { + None + } forAll { xs: Array[T#WrappedType] => - roundtrip[SCollection[T]](xs.toColl, SCollection(tpe)) - roundtrip[SType](xs.toColl.map(x => (x, x)).asWrappedType, SCollection(STuple(tpe, tpe))) + roundtrip[SCollection[T]](xs.toColl, SCollection(tpe), withVersion) + roundtrip[SType](xs.toColl.map(x => (x, x)).asWrappedType, SCollection(STuple(tpe, tpe)), withVersion) val triples = xs.toColl.map(x => TupleColl(x, x, x)).asWrappedType - roundtrip(triples, SCollection(STuple(tpe, tpe, tpe))) + roundtrip(triples, SCollection(STuple(tpe, tpe, tpe)), withVersion) val quartets = xs.toColl.map(x => TupleColl(x, x, x, x)).asWrappedType - roundtrip(quartets, 
SCollection(STuple(tpe, tpe, tpe, tpe))) + roundtrip(quartets, SCollection(STuple(tpe, tpe, tpe, tpe)), withVersion) val nested = xs.toColl.map(x => Colls.fromItems[T#WrappedType](x, x)) - roundtrip[SCollection[SCollection[T]]](nested, SCollection(SCollection(tpe))) + roundtrip[SCollection[SCollection[T]]](nested, SCollection(SCollection(tpe)), withVersion) roundtrip[SType]( xs.toColl.map { x => val arr = Colls.fromItems[T#WrappedType](x, x) (arr, arr) }.asWrappedType, - SCollection(STuple(SCollection(tpe), SCollection(tpe))) + SCollection(STuple(SCollection(tpe), SCollection(tpe))), + withVersion ) } } @@ -81,14 +100,19 @@ class DataSerializerSpecification extends SerializationSpecification { val tT = Evaluation.stypeToRType(tpe) @nowarn implicit val tag: ClassTag[T#WrappedType] = tT.classTag implicit val tAny : RType[Any] = sigma.AnyType + val withVersion = if (tpe == SHeader) { + Some(VersionContext.V6SoftForkVersion) + } else { + None + } forAll { in: (T#WrappedType, T#WrappedType) => val (x,y) = (in._1, in._2) - roundtrip[SType]((x, y).asWrappedType, STuple(tpe, tpe)) - roundtrip[SType](TupleColl(x, y, x).asWrappedType, STuple(tpe, tpe, tpe)) - roundtrip[SType](TupleColl(x, y, x, y).asWrappedType, STuple(tpe, tpe, tpe, tpe)) - roundtrip[STuple](Colls.fromItems[Any](x, y, (x, y)), STuple(tpe, tpe, STuple(tpe, tpe))) - roundtrip[STuple](Colls.fromItems[Any](x, y, TupleColl(x, y, x)), STuple(tpe, tpe, STuple(tpe, tpe, tpe))) - roundtrip[STuple](Colls.fromItems[Any](x, y, TupleColl(x, y, (x, y))), STuple(tpe, tpe, STuple(tpe, tpe, STuple(tpe, tpe)))) + roundtrip[SType]((x, y).asWrappedType, STuple(tpe, tpe), withVersion) + roundtrip[SType](TupleColl(x, y, x).asWrappedType, STuple(tpe, tpe, tpe), withVersion) + roundtrip[SType](TupleColl(x, y, x, y).asWrappedType, STuple(tpe, tpe, tpe, tpe), withVersion) + roundtrip[STuple](Colls.fromItems[Any](x, y, (x, y)), STuple(tpe, tpe, STuple(tpe, tpe)), withVersion) + roundtrip[STuple](Colls.fromItems[Any](x, y, TupleColl(x, y, x)), STuple(tpe, tpe, STuple(tpe, tpe, tpe)), withVersion) + roundtrip[STuple](Colls.fromItems[Any](x, y, TupleColl(x, y, (x, y))), STuple(tpe, tpe, STuple(tpe, tpe, STuple(tpe, tpe))), withVersion) } } @@ -129,6 +153,7 @@ class DataSerializerSpecification extends SerializationSpecification { forAll { x: ErgoBox => roundtrip[SBox.type](x, SBox) } forAll { x: AvlTree => roundtrip[SAvlTree.type](x, SAvlTree) } forAll { x: Array[Byte] => roundtrip[SByteArray](x.toColl, SByteArray) } + forAll { x: Header => roundtrip[SHeader.type](x, SHeader, Some(VersionContext.V6SoftForkVersion)) } forAll { t: SPredefType => testCollection(t) } forAll { t: SPredefType => testTuples(t) } forAll { t: SPredefType => testOption(t) } @@ -159,6 +184,42 @@ class DataSerializerSpecification extends SerializationSpecification { t.isInstanceOf[SerializerException] && t.getMessage.contains(s"BigInt value doesn't not fit into ${SBigInt.MaxSizeInBytes} bytes") }) + } + property("nuanced versioned test for header roundtrip") { + VersionContext.withVersions(VersionContext.V6SoftForkVersion, 1) { + forAll { x: Header => roundtrip[SHeader.type](x, SHeader) } + } + + an[SerializerException] should be thrownBy ( + VersionContext.withVersions((VersionContext.V6SoftForkVersion - 1).toByte, 1) { + val h = headerGen.sample.get + roundtrip[SHeader.type](h, SHeader) + }) + } + + property("header vector") { + val header = CHeader( + 0.toByte, + Helpers.decodeBytes("7a7fe5347f09017818010062000001807f86808000ff7f66ffb07f7ad27f3362"), + 
Helpers.decodeBytes("c1d70ad9b1ffc1fb9a715fff19807f2401017fcd8b73db017f1cff77727fff08"), + Helpers.decodeBytes("54d23dd080006bdb56800100356080935a80ffb77e90b800057f00661601807f17"), + Helpers.decodeBytes("5e7f1164ccd0990080c501fc0e0181cb387fc17f00ff00c7d5ff767f91ff5e68"), + -7421721754642387858L, + -4826493284887861030L, + 10, + Helpers.decodeBytes("e580c88001ff6fc89c5501017f80e001ff0101fe48c153ff7f00666b80d780ab"), + Helpers.decodeGroupElement("03e7f2875298fddd933c2e0a38968fe85bdeeb70dd8b389559a1d36e2ff1b58fc5"), + Helpers.decodeGroupElement("034e2d3b5f9e409e3ae8a2e768340760362ca33764eda5855f7a43487f14883300"), + Helpers.decodeBytes("974651c9efff7f00"), + CBigInt(new BigInteger("478e827dfa1e4b57", 16)), + Helpers.decodeBytes("01ff13"), + Colls.emptyColl + ) + + VersionContext.withVersions(VersionContext.V6SoftForkVersion, 1) { + roundtrip[SHeader.type](header, SHeader) + } } + } diff --git a/interpreter/shared/src/test/scala/sigma/serialization/SerializationSpecification.scala b/interpreter/shared/src/test/scala/sigma/serialization/SerializationSpecification.scala index 30ae6af19b..aa7a8722ba 100644 --- a/interpreter/shared/src/test/scala/sigma/serialization/SerializationSpecification.scala +++ b/interpreter/shared/src/test/scala/sigma/serialization/SerializationSpecification.scala @@ -8,6 +8,7 @@ import org.scalacheck.Arbitrary._ import org.scalatest.matchers.should.Matchers import org.scalatest.propspec.AnyPropSpec import org.scalatestplus.scalacheck.{ScalaCheckDrivenPropertyChecks, ScalaCheckPropertyChecks} +import sigma.VersionContext import sigma.ast.SType import sigma.ast._ import sigmastate.helpers.NegativeTesting @@ -26,10 +27,20 @@ trait SerializationSpecification extends AnyPropSpec with ValidationSpecification with NegativeTesting { - protected def roundTripTest[V <: Value[_ <: SType]](v: V): Assertion = { - val bytes = ValueSerializer.serialize(v) - predefinedBytesTest(v, bytes) - predefinedBytesTestNotFomZeroElement(bytes, v) + protected def roundTripTest[V <: Value[_ <: SType]](v: V, withVersion: Option[Byte] = None): Assertion = { + def test() = { + val bytes = ValueSerializer.serialize(v) + predefinedBytesTest(v, bytes) + predefinedBytesTestNotFomZeroElement(bytes, v) + } + withVersion match { + case Some(ver) => + VersionContext.withVersions(ver, 0) { + test() + } + case None => + test() + } } protected def predefinedBytesTest[V <: Value[_ <: SType]](v: V, bytes: Array[Byte]): Assertion = { @@ -41,7 +52,7 @@ trait SerializationSpecification extends AnyPropSpec r.positionLimit shouldBe positionLimitBefore } - //check that pos and consumed are being implented correctly + //check that pos and consumed are being implemented correctly protected def predefinedBytesTestNotFomZeroElement[V <: Value[_ <: SType]](bytes: Array[Byte], v: V): Assertion = { val randomInt = Gen.chooseNum(1, 20).sample.get val randomBytes = Gen.listOfN(randomInt, arbByte.arbitrary).sample.get.toArray diff --git a/interpreter/shared/src/test/scala/sigma/serialization/generators/ObjectGenerators.scala b/interpreter/shared/src/test/scala/sigma/serialization/generators/ObjectGenerators.scala index d4971f88c2..bd77766830 100644 --- a/interpreter/shared/src/test/scala/sigma/serialization/generators/ObjectGenerators.scala +++ b/interpreter/shared/src/test/scala/sigma/serialization/generators/ObjectGenerators.scala @@ -311,6 +311,7 @@ trait ObjectGenerators extends TypeGenerators case SAvlTree => arbAvlTree case SAny => arbAnyVal case SUnit => arbUnit + case SHeader => arbHeader case opt: SOption[a] => 
Arbitrary(frequency((5, None), (5, for (x <- wrappedTypeGen(opt.elemType)) yield Some(x)))) }).asInstanceOf[Arbitrary[T#WrappedType]].arbitrary @@ -694,7 +695,6 @@ trait ObjectGenerators extends TypeGenerators } yield ErgoTree.withSegregation(ZeroHeader, prop) def headerGen(stateRoot: AvlTree, parentId: Coll[Byte]): Gen[Header] = for { - id <- modifierIdBytesGen version <- arbByte.arbitrary adProofsRoot <- digest32Gen transactionRoot <- digest32Gen @@ -707,8 +707,10 @@ trait ObjectGenerators extends TypeGenerators powNonce <- nonceBytesGen powDistance <- arbBigInt.arbitrary votes <- minerVotesGen - } yield CHeader(id, version, parentId, adProofsRoot, stateRoot, transactionRoot, timestamp, nBits, - height, extensionRoot, minerPk.toGroupElement, powOnetimePk.toGroupElement, powNonce, powDistance, votes) + unparsedBytes <- collOfRange(0, 32, arbByte.arbitrary) + } yield CHeader(version, parentId, adProofsRoot, stateRoot.digest, transactionRoot, timestamp, nBits, + height, extensionRoot, minerPk.toGroupElement, powOnetimePk.toGroupElement, powNonce, powDistance, votes, + if(version > HeaderVersion.Interpreter60Version){ unparsedBytes } else {Colls.emptyColl[Byte]}) lazy val headerGen: Gen[Header] = for { stateRoot <- avlTreeGen diff --git a/interpreter/shared/src/test/scala/sigma/serialization/generators/TypeGenerators.scala b/interpreter/shared/src/test/scala/sigma/serialization/generators/TypeGenerators.scala index 81073c4849..70a215e831 100644 --- a/interpreter/shared/src/test/scala/sigma/serialization/generators/TypeGenerators.scala +++ b/interpreter/shared/src/test/scala/sigma/serialization/generators/TypeGenerators.scala @@ -16,12 +16,13 @@ trait TypeGenerators { implicit val boxTypeGen: Gen[SBox.type] = Gen.const(SBox) implicit val avlTreeTypeGen: Gen[SAvlTree.type] = Gen.const(SAvlTree) implicit val optionSigmaPropTypeGen: Gen[SOption[SSigmaProp.type]] = Gen.const(SOption(SSigmaProp)) + implicit val headerTypeGen: Gen[SHeader.type] = Gen.const(SHeader) implicit val primTypeGen: Gen[SPrimType] = Gen.oneOf[SPrimType](SBoolean, SByte, SShort, SInt, SLong, SBigInt, SGroupElement, SSigmaProp, SUnit) implicit val arbPrimType: Arbitrary[SPrimType] = Arbitrary(primTypeGen) implicit val predefTypeGen: Gen[SPredefType] = - Gen.oneOf[SPredefType](SBoolean, SByte, SShort, SInt, SLong, SBigInt, SGroupElement, SSigmaProp, SUnit, SBox, SAvlTree) + Gen.oneOf[SPredefType](SBoolean, SByte, SShort, SInt, SLong, SBigInt, SGroupElement, SSigmaProp, SUnit, SBox, SAvlTree, SHeader) implicit val arbPredefType: Arbitrary[SPredefType] = Arbitrary(predefTypeGen) implicit def genToArbitrary[T: Gen]: Arbitrary[T] = Arbitrary(implicitly[Gen[T]]) diff --git a/interpreter/shared/src/test/scala/special/sigma/SigmaTestingData.scala b/interpreter/shared/src/test/scala/special/sigma/SigmaTestingData.scala index f113a484ef..b8bf76cacf 100644 --- a/interpreter/shared/src/test/scala/special/sigma/SigmaTestingData.scala +++ b/interpreter/shared/src/test/scala/special/sigma/SigmaTestingData.scala @@ -12,7 +12,7 @@ import scorex.crypto.hash.{Blake2b256, Digest32} import scorex.util.ModifierId import sigma.ast._ import sigma.Extensions.ArrayOps -import sigmastate.eval.{CHeader, CPreHeader} +import sigmastate.eval.CPreHeader import sigmastate.helpers.TestingCommons import sigma.serialization.ErgoTreeSerializer import sigma.serialization.generators.ObjectGenerators @@ -240,17 +240,16 @@ trait SigmaTestingData extends TestingCommons with ObjectGenerators { def createAvlTreeData() = AvlTreeData( 
ErgoAlgos.decodeUnsafe("010180017f7f7b7f720c00007f7f7f0f01e857a626f37f1483d06af8077a008080").toColl, - AvlTreeFlags(false, true, false), - 728138553, - Some(2147483647) + AvlTreeFlags(true, true, true), + 32, + None ) val h1_instances = new CloneSet(1000, CHeader( - Helpers.decodeBytes("957f008001808080ffe4ffffc8f3802401df40006aa05e017fa8d3f6004c804a"), 0.toByte, Helpers.decodeBytes("0180dd805b0000ff5400b997fd7f0b9b00de00fb03c47e37806a8186b94f07ff"), Helpers.decodeBytes("01f07f60d100ffb970c3007f60ff7f24d4070bb8fffa7fca7f34c10001ffe39d"), - CAvlTree(createAvlTreeData()), + CAvlTree(createAvlTreeData()).digest, Helpers.decodeBytes("804101ff01000080a3ffbd006ac080098df132a7017f00649311ec0e00000100"), 1L, -1L, @@ -260,14 +259,15 @@ trait SigmaTestingData extends TestingCommons with ObjectGenerators { Helpers.decodeGroupElement("0361299207fa392231e23666f6945ae3e867b978e021d8d702872bde454e9abe9c"), Helpers.decodeBytes("7f4f09012a807f01"), CBigInt(new BigInteger("-e24990c47e15ed4d0178c44f1790cc72155d516c43c3e8684e75db3800a288", 16)), - Helpers.decodeBytes("7f0180") + Helpers.decodeBytes("7f0180"), + Colls.emptyColl[Byte] )) def create_h1(): Header = h1_instances.getNext val h1: Header = create_h1() - val h2: Header = create_h1().asInstanceOf[CHeader].copy(height = 2) + val h2: Header = new CHeader(h1.asInstanceOf[CHeader].wrappedValue.copy(height = 2)) val dlog_instances = new CloneSet(1000, ProveDlog( SigmaDsl.toECPoint(create_ge1()).asInstanceOf[EcPointType] diff --git a/sc/shared/src/test/scala/sigma/LanguageSpecificationV5.scala b/sc/shared/src/test/scala/sigma/LanguageSpecificationV5.scala index af4f93d861..0d9a032a16 100644 --- a/sc/shared/src/test/scala/sigma/LanguageSpecificationV5.scala +++ b/sc/shared/src/test/scala/sigma/LanguageSpecificationV5.scala @@ -4292,7 +4292,7 @@ class LanguageSpecificationV5 extends LanguageSpecificationBase { suite => property("Header properties equivalence") { verifyCases( Seq((h1, Expected(Success( - Helpers.decodeBytes("957f008001808080ffe4ffffc8f3802401df40006aa05e017fa8d3f6004c804a")), + Helpers.decodeBytes("cea31f0e0a794b103f65f8296a22ac8ff214e1bc75442186b90df4844c978e81")), cost = 1766, methodCostDetails(SHeaderMethods.idMethod, 10), 1766, Seq.fill(4)(2002)))), existingPropTest("id", { (x: Header) => x.id })) @@ -4430,18 +4430,10 @@ class LanguageSpecificationV5 extends LanguageSpecificationBase { suite => ) val header = CHeader( - Helpers.decodeBytes("1c597f88969600d2fffffdc47f00d8ffc555a9e85001000001c505ff80ff8f7f"), 0.toByte, Helpers.decodeBytes("7a7fe5347f09017818010062000001807f86808000ff7f66ffb07f7ad27f3362"), Helpers.decodeBytes("c1d70ad9b1ffc1fb9a715fff19807f2401017fcd8b73db017f1cff77727fff08"), - CAvlTree( - AvlTreeData( - ErgoAlgos.decodeUnsafe("54d23dd080006bdb56800100356080935a80ffb77e90b800057f00661601807f17").toColl, - AvlTreeFlags(true, true, false), - 2147483647, - None - ) - ), + Helpers.decodeBytes("54d23dd080006bdb56800100356080935a80ffb77e90b800057f00661601807f17"), Helpers.decodeBytes("5e7f1164ccd0990080c501fc0e0181cb387fc17f00ff00c7d5ff767f91ff5e68"), -7421721754642387858L, -4826493284887861030L, @@ -4451,7 +4443,8 @@ class LanguageSpecificationV5 extends LanguageSpecificationBase { suite => Helpers.decodeGroupElement("034e2d3b5f9e409e3ae8a2e768340760362ca33764eda5855f7a43487f14883300"), Helpers.decodeBytes("974651c9efff7f00"), CBigInt(new BigInteger("478e827dfa1e4b57", 16)), - Helpers.decodeBytes("01ff13") + Helpers.decodeBytes("01ff13"), + Colls.emptyColl ) val ctx = CContext( @@ -4459,7 +4452,7 @@ class LanguageSpecificationV5 
extends LanguageSpecificationBase { suite => headers = Coll[Header](header), preHeader = CPreHeader( 0.toByte, - Helpers.decodeBytes("1c597f88969600d2fffffdc47f00d8ffc555a9e85001000001c505ff80ff8f7f"), + header.id, -755484979487531112L, 9223372036854775807L, 11, diff --git a/sc/shared/src/test/scala/sigmastate/TestingInterpreterSpecification.scala b/sc/shared/src/test/scala/sigmastate/TestingInterpreterSpecification.scala index 1012e9a189..6265462c6c 100644 --- a/sc/shared/src/test/scala/sigmastate/TestingInterpreterSpecification.scala +++ b/sc/shared/src/test/scala/sigmastate/TestingInterpreterSpecification.scala @@ -6,18 +6,26 @@ import sigma.ast._ import sigma.ast.syntax._ import sigmastate.interpreter._ import Interpreter._ -import sigma.ast.syntax._ +import io.circe.parser.parse import org.ergoplatform._ +import org.ergoplatform.sdk.JsonCodecs import org.scalatest.BeforeAndAfterAll import scorex.util.encode.Base58 +import sigma.Colls +import sigma.VersionContext.V6SoftForkVersion import sigma.VersionContext import sigma.crypto.CryptoConstants -import sigma.data.{AvlTreeData, CAND, ProveDlog, SigmaBoolean, TrivialProp} +import sigma.data.{CAND, CAvlTree, ProveDlog, SigmaBoolean, TrivialProp} +import sigma.interpreter.ContextExtension import sigma.util.Extensions.IntOps import sigmastate.helpers.{CompilerTestingCommons, ErgoLikeContextTesting, ErgoLikeTestInterpreter, ErgoLikeTestProvingInterpreter} import sigmastate.helpers.TestingHelpers._ -import sigma.serialization.ValueSerializer +import sigma.serialization.{GroupElementSerializer, SigmaSerializer, ValueSerializer} +import sigmastate.eval.CPreHeader +import sigmastate.helpers.ErgoLikeContextTesting.noBoxes +import sigmastate.interpreter.CErgoTreeEvaluator.DefaultEvalSettings import sigmastate.utils.Helpers._ +import sigma.util.Extensions._ import scala.util.Random @@ -30,12 +38,60 @@ class TestingInterpreterSpecification extends CompilerTestingCommons lazy val verifier = new ErgoLikeTestInterpreter - def testingContext(h: Int) = - ErgoLikeContextTesting(h, - AvlTreeData.dummy, ErgoLikeContextTesting.dummyPubkey, IndexedSeq(fakeSelf), - ErgoLikeTransaction(IndexedSeq.empty, IndexedSeq.empty), - fakeSelf, activatedVersionInTests) - .withErgoTreeVersion(ergoTreeVersionInTests) + def testingContext(h: Int = 614401) = { + + // valid header from Ergo blockchain + val headerJson = + """ + |{ + | "extensionId" : "00cce45975d87414e8bdd8146bc88815be59cd9fe37a125b5021101e05675a18", + | "votes" : "000000", + | "timestamp" : 4928911477310178288, + | "size" : 223, + | "unparsedBytes" : "", + | "stateRoot" : { + | "digest" : "5c8c00b8403d3701557181c8df800001b6d5009e2201c6ff807d71808c00019780", + | "treeFlags" : "0", + | "keyLength" : "32" + | }, + | "height" : 614400, + | "nBits" : 37748736, + | "version" : 2, + | "id" : "5603a937ec1988220fc44fb5022fb82d5565b961f005ebb55d85bd5a9e6f801f", + | "adProofsRoot" : "5d3f80dcff7f5e7f59007294c180808d0158d1ff6ba10000f901c7f0ef87dcff", + | "transactionsRoot" : "f17fffacb6ff7f7f1180d2ff7f1e24ffffe1ff937f807f0797b9ff6ebdae007e", + | "extensionRoot" : "1480887f80007f4b01cf7f013ff1ffff564a0000b9a54f00770e807f41ff88c0", + | "minerPk" : "03bedaee069ff4829500b3c07c4d5fe6b3ea3d3bf76c5c28c1d4dcdb1bed0ade0c", + | "powOnetimePk" : "0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798", + | "powNonce" : "0000000000003105", + | "powDistance" : 0, + | "adProofsId" : "dec129290a763f4de41f04e87e2b661dd59758af6bdd00dd51f5d97c3a8cb9b5", + | "transactionsId" : 
"eba1dd82cf51147232e09c1f72b37c554c30f63274d5093bff36849a83472a42", + | "parentId" : "ac2101807f0000ca01ff0119db227f202201007f62000177a080005d440896d0" + |} + |""".stripMargin + + object JsonCodecs extends JsonCodecs + val header1 = JsonCodecs.headerDecoder.decodeJson(parse(headerJson).toOption.get).toOption.get + + val boxesToSpend = IndexedSeq(fakeSelf) + + val preHeader = CPreHeader(activatedVersionInTests, + parentId = header1.id, + timestamp = 3, + nBits = 0, + height = h, + minerPk = GroupElementSerializer.parse(SigmaSerializer.startReader(ErgoLikeContextTesting.dummyPubkey)).toGroupElement, + votes = Colls.emptyColl[Byte] + ) + + new ErgoLikeContext( + header1.stateRoot.asInstanceOf[CAvlTree].treeData, Colls.fromArray(Array(header1)), + preHeader, noBoxes, + boxesToSpend, ErgoLikeTransaction(IndexedSeq.empty, IndexedSeq.empty), + boxesToSpend.indexOf(fakeSelf), ContextExtension.empty, vs, DefaultEvalSettings.scriptCostLimitInEvaluator, + initCost = 0L, activatedVersionInTests).withErgoTreeVersion(ergoTreeVersionInTests) + } property("Reduction to crypto #1") { forAll() { i: Int => @@ -117,7 +173,7 @@ class TestingInterpreterSpecification extends CompilerTestingCommons val dk1 = prover.dlogSecrets(0).publicImage val dk2 = prover.dlogSecrets(1).publicImage - val ctx = testingContext(99) + val ctx = testingContext() val env = Map( "dk1" -> dk1, "dk2" -> dk2, @@ -125,7 +181,9 @@ class TestingInterpreterSpecification extends CompilerTestingCommons "bytes2" -> Array[Byte](4, 5, 6), "box1" -> testBox(10, TrueTree, 0, Seq(), Map( reg1 -> IntArrayConstant(Array[Int](1, 2, 3)), - reg2 -> BoolArrayConstant(Array[Boolean](true, false, true))))) + reg2 -> BoolArrayConstant(Array[Boolean](true, false, true)) + )) + ) val prop = mkTestErgoTree(compile(env, code)(IR).asBoolValue.toSigmaProp) val challenge = Array.fill(32)(Random.nextInt(100).toByte) val proof1 = prover.prove(prop, ctx, challenge).get.proof diff --git a/sc/shared/src/test/scala/sigmastate/TypesSpecification.scala b/sc/shared/src/test/scala/sigmastate/TypesSpecification.scala index 9a4c5ce1b8..6d13863f26 100644 --- a/sc/shared/src/test/scala/sigmastate/TypesSpecification.scala +++ b/sc/shared/src/test/scala/sigmastate/TypesSpecification.scala @@ -13,8 +13,6 @@ class TypesSpecification extends SigmaTestingData { implicit val tWrapped = wrappedTypeGen(t) forAll { x: SPredefType#WrappedType => isValueOfType(x, t) shouldBe true - // since forall t. SHeader != t - isValueOfType(x, SHeader) shouldBe false } } } diff --git a/sdk/js/src/main/scala/org/ergoplatform/sdk/js/Header.scala b/sdk/js/src/main/scala/org/ergoplatform/sdk/js/Header.scala index ef53e13dbd..ec3767c1cd 100644 --- a/sdk/js/src/main/scala/org/ergoplatform/sdk/js/Header.scala +++ b/sdk/js/src/main/scala/org/ergoplatform/sdk/js/Header.scala @@ -48,5 +48,7 @@ class Header( val powDistance: js.BigInt, /** Miner votes for changing system parameters. 
*/ - val votes: String + val votes: String, + + val unparsedBytes: String ) extends js.Object diff --git a/sdk/js/src/main/scala/org/ergoplatform/sdk/js/Isos.scala b/sdk/js/src/main/scala/org/ergoplatform/sdk/js/Isos.scala index 0f50a52686..84f2b21da8 100644 --- a/sdk/js/src/main/scala/org/ergoplatform/sdk/js/Isos.scala +++ b/sdk/js/src/main/scala/org/ergoplatform/sdk/js/Isos.scala @@ -7,9 +7,10 @@ import sigma.ast.{Constant, SType} import sigma.data.Iso import sigma.data.Iso.{isoStringToArray, isoStringToColl} import sigma.data.js.{Isos => DataIsos} +import sigma.data.CHeader import sigma.interpreter.{ContextExtension, ProverResult} import sigma.js.AvlTree -import sigmastate.eval.{CHeader, CPreHeader} +import sigmastate.eval.CPreHeader import sigmastate.fleetSdkCommon.distEsmTypesBoxesMod.Box import sigmastate.fleetSdkCommon.distEsmTypesRegistersMod.NonMandatoryRegisters import sigmastate.fleetSdkCommon.distEsmTypesTokenMod.TokenAmount @@ -27,11 +28,10 @@ object Isos { implicit val isoHeader: Iso[Header, sigma.Header] = new Iso[Header, sigma.Header] { override def to(a: Header): sigma.Header = { CHeader( - id = isoStringToColl.to(a.id), version = a.version, parentId = isoStringToColl.to(a.parentId), ADProofsRoot = isoStringToColl.to(a.ADProofsRoot), - stateRoot = AvlTree.isoAvlTree.to(a.stateRoot), + stateRootDigest = AvlTree.isoAvlTree.to(a.stateRoot).digest, transactionsRoot = isoStringToColl.to(a.transactionsRoot), timestamp = sigma.js.Isos.isoBigIntToLong.to(a.timestamp), nBits = sigma.js.Isos.isoBigIntToLong.to(a.nBits), @@ -41,7 +41,8 @@ object Isos { powOnetimePk = DataIsos.isoGroupElement.to(a.powOnetimePk), powNonce = isoStringToColl.to(a.powNonce), powDistance = sigma.js.Isos.isoBigInt.to(a.powDistance), - votes = isoStringToColl.to(a.votes) + votes = isoStringToColl.to(a.votes), + unparsedBytes = isoStringToColl.to(a.unparsedBytes) ) } override def from(b: sigma.Header): Header = { @@ -61,7 +62,8 @@ object Isos { powOnetimePk = DataIsos.isoGroupElement.from(header.powOnetimePk), powNonce = isoStringToColl.from(header.powNonce), powDistance = sigma.js.Isos.isoBigInt.from(header.powDistance), - votes = isoStringToColl.from(header.votes) + votes = isoStringToColl.from(header.votes), + unparsedBytes = isoStringToColl.from(header.unparsedBytes) ) } } diff --git a/sdk/shared/src/main/scala/org/ergoplatform/sdk/DataJsonEncoder.scala b/sdk/shared/src/main/scala/org/ergoplatform/sdk/DataJsonEncoder.scala index fc95b77e61..6c0c866baf 100644 --- a/sdk/shared/src/main/scala/org/ergoplatform/sdk/DataJsonEncoder.scala +++ b/sdk/shared/src/main/scala/org/ergoplatform/sdk/DataJsonEncoder.scala @@ -122,6 +122,10 @@ object DataJsonEncoder { val w = SigmaSerializer.startWriter() DataSerializer.serialize(v, tpe, w) encodeBytes(w.toBytes) + case SHeader => + val w = SigmaSerializer.startWriter() + DataSerializer.serialize(v, tpe, w) + encodeBytes(w.toBytes) case SAvlTree => val w = SigmaSerializer.startWriter() DataSerializer.serialize(v, tpe, w) @@ -203,6 +207,10 @@ object DataJsonEncoder { val str = decodeBytes(json) val r = SigmaSerializer.startReader(str) DataSerializer.deserialize(SSigmaProp, r) + case SHeader => // for Sigma < 6.0 , exception will be thrown by DataSerializer + val str = decodeBytes(json) + val r = SigmaSerializer.startReader(str) + DataSerializer.deserialize(SHeader, r) case SBox => val value = decodeData(json.hcursor.downField(s"value").focus.get, SLong) val tree = 
ErgoTreeSerializer.DefaultSerializer.deserializeErgoTree(decodeBytes(json.hcursor.downField(s"ergoTree").focus.get)) diff --git a/sdk/shared/src/main/scala/org/ergoplatform/sdk/JsonCodecs.scala b/sdk/shared/src/main/scala/org/ergoplatform/sdk/JsonCodecs.scala index ae14fd831a..98e8011bcc 100644 --- a/sdk/shared/src/main/scala/org/ergoplatform/sdk/JsonCodecs.scala +++ b/sdk/shared/src/main/scala/org/ergoplatform/sdk/JsonCodecs.scala @@ -12,7 +12,7 @@ import scorex.crypto.hash.Digest32 import scorex.util.ModifierId import sigma.Extensions.ArrayOps import sigma.ast.{ErgoTree, EvaluatedValue, SType} -import sigma.data.{AvlTreeData, AvlTreeFlags, CBigInt, Digest32Coll, WrapperOf} +import sigma.data.{AvlTreeData, AvlTreeFlags, CBigInt, CHeader, Digest32Coll, WrapperOf} import sigma.eval.Extensions.EvalIterableOps import sigma.eval.SigmaDsl import sigma.interpreter.{ContextExtension, ProverResult} @@ -125,13 +125,18 @@ trait JsonCodecs { "powOnetimePk" -> h.powOnetimePk.getEncoded.asJson, "powNonce" -> h.powNonce.asJson, "powDistance" -> h.powDistance.asJson, - "votes" -> h.votes.asJson + "votes" -> h.votes.asJson, + "unparsedBytes" -> h.unparsedBytes.asJson ).asJson }) + /** + * JSON decoder for Header instance. Field "unparsedBytes" is optional for now, to preserve compatibility + * with clients using older JSON decoders (before node 5.0.23). Better to add an (empty) field anyway if possible. + * This field could become mandatory in future. + */ implicit val headerDecoder: Decoder[Header] = Decoder.instance({ cursor => for { - id <- cursor.downField("id").as[Coll[Byte]] version <- cursor.downField("version").as[Byte] parentId <- cursor.downField("parentId").as[Coll[Byte]] adProofsRoot <- cursor.downField("adProofsRoot").as[Coll[Byte]] @@ -146,8 +151,10 @@ trait JsonCodecs { powNonce <- cursor.downField("powNonce").as[Coll[Byte]] powDistance <- cursor.downField("powDistance").as[sigma.BigInt] votes <- cursor.downField("votes").as[Coll[Byte]] - } yield new CHeader(id, version, parentId, adProofsRoot, stateRoot, transactionsRoot, timestamp, nBits, - height, extensionRoot, SigmaDsl.decodePoint(minerPk), SigmaDsl.decodePoint(powOnetimePk), powNonce, powDistance, votes) + unparsedBytes <- cursor.downField("unparsedBytes").as[Option[Coll[Byte]]] + } yield CHeader(version, parentId, adProofsRoot, stateRoot.digest, transactionsRoot, timestamp, nBits, + height, extensionRoot, SigmaDsl.decodePoint(minerPk), SigmaDsl.decodePoint(powOnetimePk), powNonce, powDistance, + votes, unparsedBytes.getOrElse(Colls.emptyColl)) }) implicit val preHeaderEncoder: Encoder[PreHeader] = Encoder.instance({ v: PreHeader => diff --git a/sdk/shared/src/test/scala/org/ergoplatform/sdk/DataJsonEncoderSpecification.scala b/sdk/shared/src/test/scala/org/ergoplatform/sdk/DataJsonEncoderSpecification.scala index c3f7b43af4..5835f399cb 100644 --- a/sdk/shared/src/test/scala/org/ergoplatform/sdk/DataJsonEncoderSpecification.scala +++ b/sdk/shared/src/test/scala/org/ergoplatform/sdk/DataJsonEncoderSpecification.scala @@ -13,7 +13,7 @@ import sigma.serialization.SerializerException import sigma.util.Extensions.{BigIntegerOps, EcpOps, SigmaBooleanOps} import sigma.Extensions.ArrayOps import sigma.eval.SigmaDsl -import sigma.{AvlTree, Box, Colls, Evaluation} +import sigma.{AvlTree, Box, Colls, Evaluation, Header, VersionContext} import sigma.serialization.SerializationSpecification import scala.annotation.nowarn @@ -22,29 +22,48 @@ import scala.reflect.ClassTag class DataJsonEncoderSpecification extends SerializationSpecification { object 
JsonCodecs extends JsonCodecs - def roundtrip[T <: SType](obj: T#WrappedType, tpe: T) = { - val json = DataJsonEncoder.encode(obj, tpe) - val res = DataJsonEncoder.decode(json) - res shouldBe obj + def roundtrip[T <: SType](obj: T#WrappedType, tpe: T, withVersion: Option[Byte] = None) = { + def test() = { + val json = DataJsonEncoder.encode(obj, tpe) + val res = DataJsonEncoder.decode(json) + res shouldBe obj + } + + withVersion match { + case Some(ver) => + VersionContext.withVersions(ver, 0) { + test() + } + case None => + test() + } } def testCollection[T <: SType](tpe: T) = { implicit val wWrapped = wrappedTypeGen(tpe) implicit val tT = Evaluation.stypeToRType(tpe) implicit val tagT = tT.classTag + + val withVersion = if (tpe == SHeader) { + Some(VersionContext.V6SoftForkVersion) + } else { + None + } + forAll { xs: Array[T#WrappedType] => - roundtrip[SCollection[T]](xs.toColl, SCollection(tpe)) - roundtrip[SType](xs.toColl.map(x => (x, x)).asWrappedType, SCollection(STuple(tpe, tpe))) + roundtrip[SCollection[T]](xs.toColl, SCollection(tpe), withVersion) + roundtrip[SType](xs.toColl.map(x => (x, x)).asWrappedType, SCollection(STuple(tpe, tpe)), withVersion) val nested = xs.toColl.map(x => Colls.fromItems[T#WrappedType](x, x)) - roundtrip[SCollection[SCollection[T]]](nested, SCollection(SCollection(tpe))) + roundtrip[SCollection[SCollection[T]]](nested, SCollection(SCollection(tpe)), withVersion) roundtrip[SType]( xs.toColl.map { x => val arr = Colls.fromItems[T#WrappedType](x, x) (arr, arr) }.asWrappedType, - SCollection(STuple(SCollection(tpe), SCollection(tpe))) + SCollection(STuple(SCollection(tpe), SCollection(tpe))), + withVersion ) } } @@ -54,11 +73,18 @@ class DataJsonEncoderSpecification extends SerializationSpecification { val tT = Evaluation.stypeToRType(tpe) @nowarn implicit val tag : ClassTag[T#WrappedType] = tT.classTag @nowarn implicit val tAny : RType[Any] = sigma.AnyType + + val withVersion = if (tpe == SHeader) { + Some(VersionContext.V6SoftForkVersion) + } else { + None + } + forAll { in: (T#WrappedType, T#WrappedType) => val (x,y) = (in._1, in._2) - roundtrip[SType]((x, y).asWrappedType, STuple(tpe, tpe)) - roundtrip[SType](((x, y), (x, y)).asWrappedType, STuple(STuple(tpe, tpe), STuple(tpe, tpe))) - roundtrip[SType](((x, y), ((x, y), (x, y))).asWrappedType, STuple(STuple(tpe, tpe), STuple(STuple(tpe, tpe), STuple(tpe, tpe)))) + roundtrip[SType]((x, y).asWrappedType, STuple(tpe, tpe), withVersion) + roundtrip[SType](((x, y), (x, y)).asWrappedType, STuple(STuple(tpe, tpe), STuple(tpe, tpe)), withVersion) + roundtrip[SType](((x, y), ((x, y), (x, y))).asWrappedType, STuple(STuple(tpe, tpe), STuple(STuple(tpe, tpe), STuple(tpe, tpe))), withVersion) } } @@ -98,6 +124,7 @@ class DataJsonEncoderSpecification extends SerializationSpecification { forAll { x: AvlTree => roundtrip[SAvlTree.type](x, SAvlTree) } forAll { x: Array[Byte] => roundtrip[SByteArray](x.toColl, SByteArray) } forAll { x: Box => roundtrip[SBox.type](x, SBox) } + forAll { x: Header => roundtrip[SHeader.type](x, SHeader, Some(VersionContext.V6SoftForkVersion)) } forAll { x: Option[Byte] => roundtrip[SOption[SByte.type]](x, SOption[SByte.type]) } testCollection(SOption[SLong.type]) testTuples(SOption[SLong.type]) @@ -187,25 +214,44 @@ class DataJsonEncoderSpecification extends SerializationSpecification { val tT = Evaluation.stypeToRType(tpe) @nowarn implicit val tag = tT.classTag @nowarn implicit val tAny = sigma.AnyType - forAll { x: T#WrappedType => - an[SerializerException] should be thrownBy { - 
DataJsonEncoder.encode(TupleColl(x, x, x).asWrappedType, STuple(tpe, tpe, tpe)) - } - // supported case - DataJsonEncoder.encode(SigmaDsl.Colls.fromItems(TupleColl(x, x)).asWrappedType, SCollection(STuple(tpe, tpe))) - // not supported case - an[SerializerException] should be thrownBy { - DataJsonEncoder.encode(SigmaDsl.Colls.fromItems(TupleColl(x, x, x)).asWrappedType, SCollection(STuple(tpe, tpe, tpe))) + def test() = { + forAll { x: T#WrappedType => + an[SerializerException] should be thrownBy { + DataJsonEncoder.encode(TupleColl(x, x, x).asWrappedType, STuple(tpe, tpe, tpe)) + } + + // supported case + DataJsonEncoder.encode(SigmaDsl.Colls.fromItems(TupleColl(x, x)).asWrappedType, SCollection(STuple(tpe, tpe))) + + // not supported case + an[SerializerException] should be thrownBy { + DataJsonEncoder.encode(SigmaDsl.Colls.fromItems(TupleColl(x, x, x)).asWrappedType, SCollection(STuple(tpe, tpe, tpe))) + } } } + + if (tpe == SHeader) { + VersionContext.withVersions(VersionContext.V6SoftForkVersion, 0) { + test() + } + } else { + test() + } } property("AnyValue") { forAll { t: SPredefType => - testAnyValue(t) - testAnyValue(SOption(t)) + if (t == SHeader) { + VersionContext.withVersions(VersionContext.V6SoftForkVersion, 1) { + testAnyValue(t) + testAnyValue(SOption(t)) + } + } else { + testAnyValue(t) + testAnyValue(SOption(t)) + } } }
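
Editor's note (illustration, not part of the diff above): the DataSerializer.scala change and the specs above suggest the following minimal round-trip sketch for SHeader values. It assumes only names that appear in this change set (VersionContext.withVersions, SigmaSerializer.startWriter/startReader, DataSerializer); the helper name headerRoundTrip is hypothetical. Outside an activated 6.0 context the new SHeader cases are not reached and deserialization fails with a SerializerException, as exercised by the "nuanced versioned test for header roundtrip" property.

    // Editor's sketch, not part of this change set.
    import sigma.{Header, VersionContext}
    import sigma.ast.SHeader
    import sigma.serialization.{DataSerializer, SigmaSerializer}

    def headerRoundTrip(h: Header): Header =
      VersionContext.withVersions(VersionContext.V6SoftForkVersion, 1) {
        val w = SigmaSerializer.startWriter()
        DataSerializer.serialize(h, SHeader, w)        // new SHeader case, gated on isV6SoftForkActivated
        val r = SigmaSerializer.startReader(w.toBytes)
        DataSerializer.deserialize(SHeader, r)         // yields a CHeader wrapping the parsed ErgoHeader
      }

Used as, e.g., headerRoundTrip(h1) shouldBe h1, in the style of DataSerializerSpecification.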