diff --git a/libcrux-ml-dsa/Cargo.toml b/libcrux-ml-dsa/Cargo.toml index 3358b8678..e58fef695 100644 --- a/libcrux-ml-dsa/Cargo.toml +++ b/libcrux-ml-dsa/Cargo.toml @@ -32,9 +32,18 @@ criterion = "0.5" pqcrypto-dilithium = { version = "0.5.0" } #, default-features = false [features] +default = ["std", "mldsa44", "mldsa65", "mldsa87"] simd128 = ["libcrux-sha3/simd128", "libcrux-intrinsics/simd128"] simd256 = ["libcrux-sha3/simd256", "libcrux-intrinsics/simd256"] -acvp = [] # expose internal API for ACVP testing +acvp = [] # expose internal API for ACVP testing + +# Features for the different key sizes of ML-DSA +mldsa44 = [] +mldsa65 = [] +mldsa87 = [] + +# std support +std = [] [[bench]] name = "manual44" @@ -53,6 +62,4 @@ name = "ml-dsa" harness = false [lints.rust] -unexpected_cfgs = { level = "warn", check-cfg = [ - 'cfg(hax)', -] } +unexpected_cfgs = { level = "warn", check-cfg = ['cfg(hax)'] } diff --git a/libcrux-ml-dsa/boring.sh b/libcrux-ml-dsa/boring.sh new file mode 100755 index 000000000..6411d6dab --- /dev/null +++ b/libcrux-ml-dsa/boring.sh @@ -0,0 +1,41 @@ +#!/bin/bash + +set -e + +SED=$(which gsed &>/dev/null && echo gsed || echo sed) + +no_clean=0 + +# Parse command line arguments. +all_args=("$@") +while [ $# -gt 0 ]; do + case "$1" in + --no-clean) no_clean=1 ;; + esac + shift +done + +# Extract the C code +if [[ "$no_clean" = 0 ]]; then + cargo clean +fi +# TODO: add feature flags for mldsa65 +./c.sh --config cg.yaml --out cg --mldsa65\ + --no-glue --no-unrolling --no-karamel_include --no-karamel_include + +clang-format-18 --style=Google -i cg/*.h + +if [[ -n "$BORINGSSL_HOME" ]]; then + echo "Copying the files into $BORINGSSL_HOME/third_party/libcrux/" + + cp cg/libcrux_*.h $BORINGSSL_HOME/third_party/libcrux/ + cp cg/code_gen.txt $BORINGSSL_HOME/third_party/libcrux/ + cp -r cg/intrinsics $BORINGSSL_HOME/third_party/libcrux/ + + # We use special files here. 
+ cp cg/boring/eurydice_glue.h $BORINGSSL_HOME/third_party/libcrux/ + cp -r cg/boring/karamel $BORINGSSL_HOME/third_party/libcrux/ + + libcrux_rev=$(git rev-parse HEAD) + echo "libcrux: $libcrux_rev" >> $BORINGSSL_HOME/third_party/libcrux/code_gen.txt +fi diff --git a/libcrux-ml-dsa/c.sh b/libcrux-ml-dsa/c.sh new file mode 100755 index 000000000..c8025da93 --- /dev/null +++ b/libcrux-ml-dsa/c.sh @@ -0,0 +1,133 @@ +#!/usr/bin/env bash + +set -e +set -o pipefail + +if [[ -z "$CHARON_HOME" ]]; then + echo "Please set CHARON_HOME to the Charon directory" 1>&2 + exit 1 +fi +if [[ -z "$EURYDICE_HOME" ]]; then + echo "Please set EURYDICE_HOME to the Eurydice directory" 1>&2 + exit 1 +fi +if [[ -z "$KRML_HOME" ]]; then + echo "Please set KRML_HOME to the KaRaMeL directory" 1>&2 + exit 1 +fi + +portable_only=0 +no_hacl=0 +no_charon=0 +clean=0 +config=c.yaml +out=c +glue=$EURYDICE_HOME/include/eurydice_glue.h +features="" +eurydice_glue=1 +karamel_include=1 +unrolling=16 + +# Parse command line arguments. +all_args=("$@") +while [ $# -gt 0 ]; do + case "$1" in + -p | --portable) portable_only=1 ;; + --no-hacl) no_hacl=1 ;; + --no-charon) no_charon=1 ;; + -c | --clean) clean=1 ;; + --config) config="$2"; shift ;; + --out) out="$2"; shift ;; + --glue) glue="$2"; shift ;; + --mldsa65) features="${features} --cargo-arg=--no-default-features --cargo-arg=--features=mldsa65" ;; + --no-glue) eurydice_glue=0 ;; + --no-karamel_include) karamel_include=0 ;; + --no-unrolling) unrolling=0 ;; + esac + shift +done + +if [[ "$portable_only" = 1 ]]; then + export LIBCRUX_DISABLE_SIMD256=1 + export LIBCRUX_DISABLE_SIMD128=1 +fi + +# TODO: add LIBCRUX_ENABLE_SIMD128=1 LIBCRUX_ENABLE_SIMD256=1 charon invocations +if [[ "$no_charon" = 0 ]]; then + rm -rf ../libcrux_ml_dsa.llbc ../libcrux_sha3.llbc + echo "Running charon (sha3) ..." + (cd ../libcrux-sha3 && RUSTFLAGS="--cfg eurydice" $CHARON_HOME/bin/charon) + if ! [[ -f ../libcrux_sha3.llbc ]]; then + echo "😱😱😱 You are the victim of a bug." 
+ echo "Suggestion: rm -rf ../target or cargo clean" + exit 1 + fi + echo "Running charon (ml-dsa) with $features ..." + RUSTFLAGS="--cfg eurydice" $CHARON_HOME/bin/charon $features +else + echo "Skipping charon" +fi + +mkdir -p $out +cd $out + +# Clean only when requesting it. +# Note that we can not extract for all platforms on any platform right now. +# Make sure to keep files from other platforms. +if [[ "$clean" = 1 ]]; then + rm -rf *.c *.h + rm -rf internal/*.h +fi + +# Write out infos about the used tools +[[ -z "$CHARON_REV" && -d $CHARON_HOME/.git ]] && export CHARON_REV=$(git -C $CHARON_HOME rev-parse HEAD) +[[ -z "$EURYDICE_REV" && -d $EURYDICE_HOME/.git ]] && export EURYDICE_REV=$(git -C $EURYDICE_HOME rev-parse HEAD) +[[ -z "$KRML_REV" && -d $KRML_HOME/.git ]] && export KRML_REV=$(git -C $KRML_HOME rev-parse HEAD) +[[ -z "$LIBCRUX_REV" ]] && export LIBCRUX_REV=$(git rev-parse HEAD) +if [[ -z "$FSTAR_REV" && -d $FSTAR_HOME/.git ]]; then + export FSTAR_REV=$(git -C $FSTAR_HOME rev-parse HEAD) +else + export FSTAR_REV=$(fstar.exe --version | grep commit | sed 's/commit=\(.*\)/\1/') +fi +rm -f code_gen.txt +echo "This code was generated with the following revisions:" >> code_gen.txt +echo -n "Charon: " >> code_gen.txt +echo "$CHARON_REV" >> code_gen.txt +echo -n "Eurydice: " >> code_gen.txt +echo "$EURYDICE_REV" >> code_gen.txt +echo -n "Karamel: " >> code_gen.txt +echo "$KRML_REV" >> code_gen.txt +echo -n "F*: " >> code_gen.txt +echo "$FSTAR_REV" >> code_gen.txt +echo -n "Libcrux: " >> code_gen.txt +echo "$LIBCRUX_REV" >> code_gen.txt + +# Generate header +cat spdx-header.txt > header.txt +sed -e 's/^/ * /' code_gen.txt >> header.txt +echo " */" >> header.txt + +# Run eurydice to extract the C code +echo "Running eurydice ..." 
+echo $EURYDICE_HOME/eurydice --config ../$config -funroll-loops $unrolling \ + --header header.txt \ + ../../libcrux_ml_dsa.llbc ../../libcrux_sha3.llbc +$EURYDICE_HOME/eurydice --debug "-dast" --config ../$config -funroll-loops $unrolling \ + --header header.txt \ + ../../libcrux_ml_dsa.llbc ../../libcrux_sha3.llbc +if [[ "$eurydice_glue" = 1 ]]; then + cp $EURYDICE_HOME/include/eurydice_glue.h . +fi + +if [[ "$karamel_include" = 1 ]]; then + echo "Copying karamel/include ..." + mkdir -p karamel + cp -R $KRML_HOME/include karamel/ +fi + +find . -type f -name '*.c' -and -not -path '*_deps*' -exec clang-format --style=Google -i "{}" \; +find . -type f -name '*.h' -and -not -path '*_deps*' -exec clang-format --style=Google -i "{}" \; +if [ -d "internal" ]; then + clang-format --style=Google -i internal/*.h +fi +clang-format --style=Google -i intrinsics/*.h diff --git a/libcrux-ml-dsa/c.yaml b/libcrux-ml-dsa/c.yaml new file mode 100644 index 000000000..556245d36 --- /dev/null +++ b/libcrux-ml-dsa/c.yaml @@ -0,0 +1,231 @@ +files: + # INTRINSICS + + - name: libcrux_intrinsics_neon + library: true + inline_static: true + api: + - [libcrux_intrinsics, arm64] + + - name: libcrux_intrinsics_avx2 + library: true + inline_static: true + api: + - [libcrux_intrinsics, avx2] + + # SHA3 (no mention of libcrux_mldsa in this section, please) + + # Keep the per-target seperation idea: each SHA3 variant in its own file + - name: libcrux_sha3_neon + api: + - [libcrux_sha3, neon, "*"] + private: + # When patterns is the only key of private, it is optional, and one may + # just specify a list of patterns that are understood to match patterns + # (not monomorphizations) + patterns: + - [libcrux_sha3, simd, arm64, "*"] + monomorphizations_of: + - [libcrux_sha3, neon, "*"] + - [libcrux_sha3, simd, arm64, "*"] + monomorphizations_using: + - [libcrux_sha3, neon, "*"] + - [libcrux_sha3, simd, arm64, "*"] + monomorphizations_exact: + - [libcrux_sha3, generic_keccak, "KeccakState_fc"] + 
include_in_h: + - '"intrinsics/libcrux_intrinsics_arm64.h"' + + - name: libcrux_sha3_avx2 + api: + - [libcrux_sha3, avx2, "*"] + private: + patterns: + - [libcrux_sha3, simd, avx2, "*"] + monomorphizations_of: + - [libcrux_sha3, avx2, "*"] + - [libcrux_sha3, simd, avx2, "*"] + monomorphizations_using: + # Should this also include the monomorphizations using + # core.arch.x86.__m256i? + - [libcrux_sha3, avx2, "*"] + - [libcrux_sha3, simd, avx2, "*"] + monomorphizations_exact: + - [libcrux_sha3, generic_keccak, KeccakState_55] + include_in_h: + - '"intrinsics/libcrux_intrinsics_avx2.h"' + + # Public API header for SHA3 + - name: libcrux_sha3 + inline_static: true + api: + exact: + - [libcrux_sha3, hash] + - [libcrux_sha3, sha224] + - [libcrux_sha3, sha256] + - [libcrux_sha3, sha384] + - [libcrux_sha3, sha44] + - [libcrux_sha3, keccakx1] + - [libcrux_sha3, shake128] + - [libcrux_sha3, shake256] + - [libcrux_sha3, shake128_ema] + - [libcrux_sha3, shake256_ema] + - [libcrux_sha3, sha224_ema] + - [libcrux_sha3, sha256_ema] + - [libcrux_sha3, sha384_ema] + - [libcrux_sha3, sha44_ema] + - [libcrux_sha3, portable, sha224] + - [libcrux_sha3, portable, sha256] + - [libcrux_sha3, portable, sha384] + - [libcrux_sha3, portable, sha44] + - [libcrux_sha3, portable, keccakx1] + - [libcrux_sha3, portable, shake128] + - [libcrux_sha3, portable, shake256] + + # Common parts of SHA3 (this catches stuff that hasn't matched above). 
Must + # come after the (more precise) patterns above concerning platform-specific hash_functions + - name: libcrux_sha3_internal + internal: + patterns: + - [libcrux_sha3, "*"] + monomorphizations_of: + - [libcrux_sha3, "*"] + monomorphizations_using: + - [libcrux_sha3, "*"] + inline_static: true + + # MLDSA: HASH FUNCTIONS (as used by mldsa) + + - name: libcrux_mldsa_neon + api: + - [libcrux_ml_dsa, vector, neon, "*"] + - [libcrux_ml_dsa, hash_functions, neon, "*"] + private: + monomorphizations_using: + - [libcrux_ml_dsa, vector, neon, "*"] + - [libcrux_ml_dsa, hash_functions, neon, "*"] + monomorphizations_of: + - [libcrux_ml_dsa, vector, neon, "*"] + - [libcrux_ml_dsa, hash_functions, neon, "*"] + + - name: libcrux_mldsa_avx2 + api: + - [libcrux_ml_dsa, vector, avx2, "*"] + - [libcrux_ml_dsa, hash_functions, avx2, "*"] + private: + monomorphizations_using: + - [libcrux_ml_dsa, vector, avx2, "*"] + - [libcrux_ml_dsa, hash_functions, avx2, "*"] + monomorphizations_of: + - [libcrux_ml_dsa, vector, avx2, "*"] + - [libcrux_ml_dsa, hash_functions, avx2, "*"] + + # This covers slightly more than the two bundles above, but this greatly + # simplifies our lives. 
+ - name: libcrux_mldsa_portable + api: + - [libcrux_ml_dsa, vector, "*"] + - [libcrux_ml_dsa, hash_functions, portable, "*"] + private: + patterns: + - [ libcrux_ml_dsa, polynomial, "*" ] + monomorphizations_using: + - [ libcrux_ml_dsa, polynomial, "*" ] + - [libcrux_ml_dsa, vector, "*"] + - [libcrux_ml_dsa, hash_functions, portable, "*"] + monomorphizations_of: + - [ libcrux_ml_dsa, polynomial, "*" ] + - [libcrux_ml_dsa, vector, "*"] + - [libcrux_ml_dsa, hash_functions, portable, "*"] + + # MLDSA: MISC NON-ARCHITECTURE SPECIFIC HEADERS + - name: libcrux_core + private: + monomorphizations_of: + - [ core, "*"] + - [ libcrux_ml_dsa, types, "*"] + - [ libcrux_ml_dsa, constant_time_ops, "*"] + - [ libcrux_ml_dsa, utils, "*" ] + monomorphizations_using: + - [ Eurydice, "*" ] + - [ libcrux_ml_dsa, types, "*"] + patterns: + - [ core, "*"] + - [ libcrux_ml_dsa, types ] + - [ libcrux_ml_dsa, constants ] + - [ libcrux_ml_dsa, constant_time_ops, "*"] + - [ libcrux_ml_dsa, utils, "*" ] + api: + - [Eurydice, "*"] + + # MLDSA-44 + + - name: libcrux_mldsa44_avx2 + api: + - [libcrux_ml_dsa, mldsa44, avx2] + + - name: libcrux_mldsa44_neon + api: + - [libcrux_ml_dsa, mldsa44, neon] + + - name: libcrux_mldsa44_portable + api: + - [libcrux_ml_dsa, mldsa44, portable] + + # This one contains common definitions like types, etc. + - name: libcrux_mldsa44 + api: + - [libcrux_ml_dsa, mldsa44] + + # MLDSA-65 + + - name: libcrux_mldsa65_avx2 + api: + - [libcrux_ml_dsa, mldsa65, avx2] + + - name: libcrux_mldsa65_neon + api: + - [libcrux_ml_dsa, mldsa65, neon] + + - name: libcrux_mldsa65_portable + api: + - [libcrux_ml_dsa, mldsa65, portable] + + # This one contains common definitions like types, etc. 
+ - name: libcrux_mldsa65 + api: + - [libcrux_ml_dsa, mldsa65] + + # MLDSA-87 + + - name: libcrux_mldsa87_avx2 + api: + - [libcrux_ml_dsa, mldsa87, avx2] + + - name: libcrux_mldsa87_neon + api: + - [libcrux_ml_dsa, mldsa87, neon] + + - name: libcrux_mldsa87_portable + api: + - [libcrux_ml_dsa, mldsa87, portable] + + # This one contains common definitions like types, etc. + - name: libcrux_mldsa87 + api: + - [libcrux_ml_dsa, mldsa87] + + # We let monomorphization insert things at the right place in each one of the + # 9 variants above. Helpers that have not been assigned into any of the + # individual libcrux_mldsaXXX files end up in a catch-all. + - name: libcrux_mldsa_common + private: + - [libcrux_ml_dsa, "*"] + inline_static: true + +naming: + skip_prefix: + - [ core, core_arch, arm_shared, neon ] + - [ core, core_arch, x86 ] + - [libcrux_intrinsics, arm64] + - [libcrux_intrinsics, avx2] diff --git a/libcrux-ml-dsa/cg.yaml b/libcrux-ml-dsa/cg.yaml new file mode 100644 index 000000000..76d5bf23d --- /dev/null +++ b/libcrux-ml-dsa/cg.yaml @@ -0,0 +1,118 @@ +files: + # INTRINSICS + - name: libcrux_intrinsics_avx2 + library: true + inline_static: true + api: + - [libcrux_intrinsics, avx2] + + # # Constant time ops + # - name: libcrux_ct_ops + # inline_static: true + # api: + # - [libcrux_ml_kem, constant_time_ops] + + # SHA3 (no mention of libcrux_mlkem in this section, please) + + - name: libcrux_sha3_avx2 + inline_static: true + target: "avx2" + api: + patterns: + - [libcrux_sha3, avx2, "*"] + - [libcrux_sha3, simd, avx2, "*"] + monomorphizations_exact: + - [libcrux_sha3, generic_keccak, KeccakState_55] + monomorphizations_of: + - [libcrux_sha3, avx2, "*"] + - [libcrux_sha3, simd, avx2, "*"] + monomorphizations_using: + # Should this also include the monomorphizations using + # core.arch.x86.__m256i? 
+ - [libcrux_sha3, avx2, "*"] + - [libcrux_sha3, simd, avx2, "*"] + include_in_h: + - '"intrinsics/libcrux_intrinsics_avx2.h"' + + # Portable SHA3 + - name: libcrux_sha3_portable + inline_static: true + api: + patterns: + - [libcrux_sha3, "*"] + monomorphizations_of: + - [libcrux_sha3, "*"] + monomorphizations_using: + - [libcrux_sha3, "*"] + + # MLKEM: MISC NON-ARCHITECTURE SPECIFIC HEADERS + - name: libcrux_core + inline_static: true + private: + monomorphizations_of: + - [core, "*"] + - [libcrux_ml_dsa, types, "*"] + - [libcrux_ml_dsa, utils, "*" ] + monomorphizations_using: + - [Eurydice, "*" ] + - [libcrux_ml_dsa, types, "*"] + patterns: + - [core, "*"] + - [libcrux_ml_dsa, types, "*" ] + - [libcrux_ml_dsa, constants ] + - [libcrux_ml_dsa, utils, "*" ] + - [libcrux_ml_dsa, simd, traits ] + api: + - [Eurydice, "*"] + + # MLDSA-65 + + - name: libcrux_mldsa65_avx2 + inline_static: true + target: "avx2" + include_in_h: + - '"intrinsics/libcrux_intrinsics_avx2.h"' + api: + patterns: + - [libcrux_ml_dsa, simd, avx2, "*"] + - [libcrux_ml_dsa, hash_functions, simd256, "*"] + - [libcrux_ml_dsa, ml_dsa_65, avx2, "*"] + - [libcrux_ml_dsa, ml_dsa_generic, instantiations, avx2, "*"] + # - [libcrux_ml_dsa, polynomial, "*" ] + monomorphizations_of: + - [libcrux_ml_dsa, simd, avx2, "*"] + - [libcrux_ml_dsa, hash_functions, simd256, "*"] + - [libcrux_ml_dsa, ml_dsa_65, avx2, "*"] + - [libcrux_ml_dsa, ml_dsa_generic, instantiations, avx2, "*"] + monomorphizations_using: + - [libcrux_ml_dsa, simd, avx2, "*"] + - [libcrux_ml_dsa, hash_functions, simd256, "*"] + + - name: libcrux_mldsa65_portable + inline_static: true + api: + patterns: + - [libcrux_ml_dsa, "*"] + - [libcrux_ml_dsa, simd, "*"] + - [libcrux_ml_dsa, hash_functions, portable, "*"] + - [libcrux_ml_dsa, ml_dsa_65, portable, "*"] + - [libcrux_ml_dsa, ml_dsa_generic, instantiations, portable, "*"] + monomorphizations_of: + - [libcrux_ml_dsa, polynomial, "*" ] + - [libcrux_ml_dsa, simd, "*"] + - [libcrux_ml_dsa, 
hash_functions, portable, "*"] + - [libcrux_ml_dsa, ml_dsa_65, portable] + - [libcrux_ml_dsa, ml_dsa_generic, instantiations, portable, "*"] + monomorphizations_using: + - [libcrux_ml_dsa, polynomial, "*" ] + - [libcrux_ml_dsa, simd, "*"] + - [libcrux_ml_dsa, hash_functions, portable, "*"] + - [libcrux_ml_dsa, ml_dsa_generic, instantiations, portable, "*"] + +naming: + skip_prefix: + - [ core, core_arch, arm_shared, neon ] + - [ core, core_arch, x86 ] + - [ core, option ] + - [ core, result ] + - [ core, array ] diff --git a/libcrux-ml-dsa/cg/code_gen.txt b/libcrux-ml-dsa/cg/code_gen.txt new file mode 100644 index 000000000..19672611e --- /dev/null +++ b/libcrux-ml-dsa/cg/code_gen.txt @@ -0,0 +1,6 @@ +This code was generated with the following revisions: +Charon: 45f5a34f336e35c6cc2253bc90cbdb8d812cefa9 +Eurydice: e2db6e88adc9995ca9d3dedf7fa9bc4095e9ca20 +Karamel: 8c3612018c25889288da6857771be3ad03b75bcd +F*: 5643e656b989aca7629723653a2570c7df6252b9-dirty +Libcrux: ef3ee2539580595003c62a749034ae0c76d22a0d diff --git a/libcrux-ml-dsa/cg/header.txt b/libcrux-ml-dsa/cg/header.txt new file mode 100644 index 000000000..cd14c7d06 --- /dev/null +++ b/libcrux-ml-dsa/cg/header.txt @@ -0,0 +1,12 @@ +/* + * SPDX-FileCopyrightText: 2024 Cryspen Sarl + * + * SPDX-License-Identifier: MIT or Apache-2.0 + * + * This code was generated with the following revisions: + * Charon: 45f5a34f336e35c6cc2253bc90cbdb8d812cefa9 + * Eurydice: e2db6e88adc9995ca9d3dedf7fa9bc4095e9ca20 + * Karamel: 8c3612018c25889288da6857771be3ad03b75bcd + * F*: 5643e656b989aca7629723653a2570c7df6252b9-dirty + * Libcrux: ef3ee2539580595003c62a749034ae0c76d22a0d + */ diff --git a/libcrux-ml-dsa/cg/spdx-header.txt b/libcrux-ml-dsa/cg/spdx-header.txt new file mode 100644 index 000000000..f83a84ab6 --- /dev/null +++ b/libcrux-ml-dsa/cg/spdx-header.txt @@ -0,0 +1,5 @@ +/* + * SPDX-FileCopyrightText: 2024 Cryspen Sarl + * + * SPDX-License-Identifier: MIT or Apache-2.0 + * diff --git a/libcrux-ml-dsa/hax.py 
b/libcrux-ml-dsa/hax.py index e8d2ba309..5d10da3fe 100755 --- a/libcrux-ml-dsa/hax.py +++ b/libcrux-ml-dsa/hax.py @@ -85,6 +85,7 @@ def __call__(self, parser, args, values, option_string=None) -> None: "-libcrux_ml_dsa::hash_functions::simd256::*", "-libcrux_ml_dsa::hash_functions::neon::*", "+:libcrux_ml_dsa::hash_functions::*::*", + "-**::types::non_hax_impls::**", ] include_str = " ".join(includes) interface_include = "+**" diff --git a/libcrux-ml-dsa/profile.json b/libcrux-ml-dsa/profile.json new file mode 100644 index 000000000..4c5f8be16 --- /dev/null +++ b/libcrux-ml-dsa/profile.json @@ -0,0 +1 @@ +{"meta":{"categories":[{"name":"Other","color":"grey","subcategories":["Other"]},{"name":"User","color":"yellow","subcategories":["Other"]},{"name":"Kernel","color":"orange","subcategories":["Other"]}],"debug":false,"extensions":{"baseURL":[],"id":[],"length":0,"name":[]},"interval":1.0,"preprocessedProfileVersion":46,"processType":0,"product":"/home/franziskus/libcrux/target/release/examples/sign_44","sampleUnits":{"eventDelay":"ms","threadCPUDelta":"µs","time":"ms"},"startTime":1732989457679.1018,"symbolicated":false,"pausedRanges":[],"version":24,"usesOnlyOneStackType":true,"doesNotUseFrameImplementation":true,"sourceCodeIsNotOnSearchfox":true,"markerSchema":[]},"libs":[{"name":"libc.so.6","path":"/usr/lib/x86_64-linux-gnu/libc.so.6","debugName":"libc.so.6","debugPath":"/usr/lib/x86_64-linux-gnu/libc.so.6","breakpadId":"84EF0F492403910C833978D494D39E530","codeId":"490fef8403240c91833978d494d39e537409b92e","arch":null},{"name":"sign_44","path":"/home/franziskus/libcrux/target/release/examples/sign_44","debugName":"sign_44","debugPath":"/home/franziskus/libcrux/target/release/examples/sign_44","breakpadId":"43E25B1307697D9D59DC1136BE04A9EA0","codeId":"135be24369079d7d59dc1136be04a9ea763c5d94","arch":null}],"threads":[{"frameTable":{"length":1,"address":[962699],"inlineDepth":[0],"category":[1],"subcategory":[0],"func":[0],"nativeSymbol":[null],"innerWindowID":[nul
l],"implementation":[null],"line":[null],"column":[null],"optimizations":[null]},"funcTable":{"length":1,"name":[1],"isJS":[false],"relevantForJS":[false],"resource":[0],"fileName":[null],"lineNumber":[null],"columnNumber":[null]},"markers":{"length":0,"category":[],"data":[],"endTime":[],"name":[],"phase":[],"startTime":[]},"name":"samply","isMainThread":true,"nativeSymbols":{"length":0,"address":[],"functionSize":[],"libIndex":[],"name":[]},"pausedRanges":[],"pid":"1785864","processName":"samply","processShutdownTime":2160792986.873682,"processStartupTime":0.0,"processType":"default","registerTime":0.0,"resourceTable":{"length":1,"lib":[0],"name":[0],"host":[null],"type":[1]},"samples":{"length":6,"stack":[0,0,0,0,0,0],"time":[2160792986.835161,2160792986.846641,2160792986.851841,2160792986.857222,2160792986.862572,2160792986.868022],"weight":[1,1,1,1,1,1],"weightType":"samples","threadCPUDelta":[0,11,5,5,5,5]},"stackTable":{"length":1,"prefix":[null],"frame":[0],"category":[1],"subcategory":[0]},"stringArray":["libc.so.6","0xeb08b"],"tid":"1785864","unregisterTime":2160792986.873682},{"frameTable":{"length":2109,"address":[-1,596761,594870,591274,469981,600195,1706392,470692,600744,73246,591135,474680,52884,608335,1708036,590765,442642,475191,459577,590954,472759,72924,604615,604247,605839,1705797,608713,467241,626538,625946,471730,597546,446965,599098,473209,494644,616583,600752,69044,628169,609657,594975,451943,602085,61776,438684,600803,1708041,623686,604999,608476,474890,494612,598967,600899,72811,600140,591987,631619,625166,458761,601892,623646,602061,56874,625952,460080,74209,605399,600736,71136,600760,67005,62389,71747,451805,603181,600768,63971,63932,441681,598995,436349,625843,469724,625042,625099,594842,610071,60782,68576,600914,471756,494473,596671,603722,625711,625178,610039,1706413,611162,603272,1706359,464236,610079,61823,73868,586530,627341,625820,465241,597524,70573,472213,494650,596310,64623,70724,612293,1708046,55829,625662,472918,494564,65972,6
08572,60560,461712,624682,596599,619053,1706540,625619,596658,461866,600329,72698,474199,602053,53333,625026,608540,1706402,590221,611149,73746,472036,67178,61927,74430,623085,609415,590871,1706492,72334,474694,55923,609674,603199,605874,630857,472831,72801,608556,55287,440745,53404,609833,1706378,603097,60646,603112,472709,76468,600587,592584,631099,625072,611167,600863,460603,598347,611065,449984,603785,608114,591944,625172,65053,605034,61605,587699,70185,625302,612277,461934,600726,69995,72235,74937,467721,609985,600728,1706373,602377,610777,596644,440390,74164,622497,474572,70646,600894,64249,594743,73149,65601,469854,68059,461026,630984,599155,68747,612071,435487,625667,594570,448810,625305,608199,631640,72531,608239,494354,76475,609685,438609,61222,62085,467339,597249,74619,602168,65597,598841,471718,610201,610000,74582,625922,443267,60078,70483,473732,76472,628100,596993,63051,76408,625201,588243,626043,440318,71843,64420,624051,72816,610055,56209,460545,71751,69464,471590,52866,623396,593804,625792,609304,474838,71073,472817,596305,596379,72638,610495,596509,596292,444991,1708083,608564,57989,597194,447814,625848,609428,592541,627627,67753,616303,53389,600496,72243,73794,473218,599046,603364,625150,609648,472937,494596,64757,76208,608276,610063,58699,594011,602953,74509,603767,617336,75491,1708051,473840,602809,473990,596813,72796,72929,72894,612430,589713,71437,75627,471976,599229,68485,598601,72292,625215,494724,622182,62944,608268,70840,69866,74949,625381,631273,609887,1708086,66681,599079,67611,453926,61539,594617,460266,609694,71251,608580,61433,435726,607216,474610,494477,596342,466140,602077,60886,625966,597048,600193,440223,631473,60905,65857,475083,597333,600787,1706368,604034,1706350,616236,442405,624034,604012,594691,597617,599061,73973,625748,608387,611504,449241,625889,494616,69096,601636,593557,64679,625444,626357,610296,474050,609433,460326,66137,627742,70367,66335,467685,68321,1706385,625091,625147,62189,594724,61227,462564,61500,72551,596779
,72826,619740,66933,63709,63733,66259,65065,622565,1706086,608548,53006,596703,601508,589420,73125,59979,473122,473063,494637,596871,69453,62421,596258,594241,63568,72907,597111,450829,611264,445853,55883,625708,473389,613301,66401,623663,473941,70541,66669,472520,609679,588088,69259,623548,610047,54717,474230,602069,58415,55673,473952,494580,625240,608450,67375,71152,466114,596797,447235,602935,625556,73238,72880,607644,54212,64391,52892,68141,468652,603886,627428,603214,71388,601167,625504,615273,70454,74822,64736,59005,70308,72066,596844,597698,462438,627610,627521,596612,55400,62407,604946,437863,54757,600120,598338,71599,463710,62986,465178,57089,596831,63721,628114,611423,598875,469244,610903,466279,71511,609954,598930,623972,56901,624711,55229,611325,447355,63464,1706363,595960,56633,596716,454084,609661,441077,602760,473770,59455,608496,67895,69130,594315,625256,470577,65838,468791,65695,603993,1705776,625342,596890,611173,596573,588684,70181,603258,460804,608487,436188,72545,69788,74627,73438,63077,603103,598545,66240,450787,602019,66633,600890,53394,609783,627896,473429,458791,610504,603608,631788,53029,620491,75486,609592,597129,461047,64105,67225,601843,609528,612376,60083,589083,65885,1706264,464517,62184,70742,469860,605821,608852,596826,605392,1706101,65349,471946,494600,608872,627687,70097,602804,603347,627250,54363,54583,475047,1706295,72655,598859,73968,627971,62489,611060,438316,62616,74109,465667,594061,53749,598693,74813,67232,449012,627652,623762,66531,66097,53338,53142,611341,597030,440201,66901,64395,494568,598778,70492,74056,597083,73646,70552,457135,615515,64507,455474,631709,625599,600571,1706506,1706406,625226,620482,623776,473358,68769,63061,474270,494628,593498,597294,602791,73193,470766,607591,610936,596415,608108,620729,75282,608319,1706543,595204,438095,625229,60030,600872,73541,457354,607002,434854,74282,602503,71006,609925,458576,625223,601664,446287,60552,64178,623672,625513,624113,601834,72688,590310,72510,58835,471482,64100,4489
37,600153,69022,474830,623486,64239,445816,608300,63663,68883,72570,65299,53731,623267,467156,58131,600868,608468,473830,53960,596267,598358,53684,69297,448583,603075,464171,603194,456132,625050,471984,627930,606791,606515,625234,606644,57013,620245,60201,55009,473786,494512,1707776,596631,450541,623796,66383,64651,457469,472951,609145,438579,55184,603657,625102,631616,62925,74807,462657,67345,609849,589729,68889,466367,623540,473028,609258,474758,594024,73629,76317,603639,466974,1708068,54411,440083,72316,60764,471826,609289,594346,625521,71780,588398,64476,68911,457277,604158,493317,494347,598760,70354,1706355,450218,607059,63703,447048,625576,73576,458878,461909,625183,474800,494592,54735,60664,611182,450668,625275,57461,625730,597088,68605,610872,460893,598501,627458,623444,606939,599261,64797,624059,625499,66767,53034,56477,67424,612583,63033,630906,469656,67747,474144,52993,54864,623235,597053,61074,72540,467584,605341,594511,56617,1706283,65012,70666,445664,625815,466088,493367,494410,1706289,594200,596487,626064,601900,625760,596978,54611,605434,610921,70196,601851,603850,61279,597479,435373,1706340,53623,587455,1706268,1706257,596936,463944,631581,472253,596407,470155,603594,74884,65829,599324,73120,436249,625175,625540,73420,56075,597134,606826,474489,64355,472071,56740,54623,628002,72447,73043,450473,631013,452692,71162,595113,65084,440971,604194,462332,623306,631770,60437,593298,1708032,598688,440620,53640,625736,602660,55435,57919,75391,630690,625917,465746,607208,609284,599173,459485,625587,623061,594943,586970,450750,75497,56755,59280,69277,72012,594529,593450,57023,451766,446593,71241,631155,609384,599116,436516,64143,471938,611043,62136,57817,69845,448551,607005,1708058,446475,66279,631023,601956,598823,611999,74318,70017,61985,53290,468438,624828,65617,69860,609577,612475,609944,609001,438158,597215,72046,450685,437880,601611,474155,610489,609749,1706238,70816,455843,54879,447117,64066,65463,625772,470622,72102,453192,623911,608664,53644,613222,170
6474,64411,71772,1706479,589444,65877,600980,62807,610772,438454,67885,625475,607445,617392,593757,452603,608292,72214,440931,600839,54607,631423,58287,70674,448662,75963,603630,472947,596784,612907,62289,602100,63416,493802,76459,604074,473808,494544,58437,615849,596766,71964,65565,68179,53826,594476,474992,603529,468454,606777,608284,68253,625355,58557,596456,74639,68463,455225,60991,461174,625259,606712,599150,72673,600886,450305,56085,474648,494541,64513,453253,596585,493342,64602,625131,619998,64891,461923,611047,73054,597168,603966,595235,587372,61876,471840,57180,450679,62250,75574,625013,610471,595475,474085,64277,627557,74817,607931,74925,452137,600316,460162,626617,630926,616327,61009,625347,463947,54347,597410,74373,467524,54847,603890,619360,57043,445841,624134,625676,609980,613150,598609,62722,72346,623517,616141,470591,594399,475160,463696,607559,71338,460274,603498,625467,73226,465706,627733,58841,438419,627767,63318,58487,628109,603177,625837,610098,612529,62303,72783,447128,627157,473482,596973,599495,76299,74726,59139,609271,595818,597303,454042,602040,73220,62698,610916,623452,603754,593160,75046,443349,493395,73048,66547,74680,606894,469337,596959,76090,464151,472725,494463,1706382,60089,70475,55013,586427,68761,435779,68903,625908,56219,631655,63698,465799,444035,58429,53558,1706469,63798,438112,453695,67625,625141,53204,466046,625493,494619,442820,67075,76441,604557,608481,494633,443028,63154,469735,626299,53631,610345,621235,66374,463759,625067,625562,474925,67201,606761,55296,586570,631531,627647,619414,68343,602043,456893,57147,54713,611612,440980,67775,611136,452580,625264,625998,65526,64647,590025,74186,471768,62730,71332,607507,67169,459298,53676,596858,1706397,59947,76041,631493,56621,52888,59125,593829,623189,62203,53523,606916,625410,70950,66405,444099,627545,452441,65731,71680,594586,587396,65735,601440,625840,474781,605346,590535,442614,473979,456594,627906,64257,452331,603048,625078,625528,607205,67455,590049,603323,445441,623874,49
4572,625237,623298,68718,63606,589318,63725,452228,625436,626159,600123,605215,630954,464081,1706235,440915,631042,60891,435721,471927,459070,631077,609559,1705783,611080,587772,71884,462860,54208,607457,71587,625484,609295,54497,608863,67475,458006,623325,74491,628138,61966,617679,601972,448602,74604,605853,608695,621122,63685,450744,624438,623383,473849,607286,70316,74300,624685,55419,67909,467331,455030,625192,473973,54749,65335,446824,607085,628196,75146,56995,449479,609448,53372,624790,62677,436458,473998,72417,440253,70619,56343,494575,599587,594996,598635,589629,71720,448545,625112,71872,467075,607526,470716,627993,607816,606867,589183,71428,627945,1706502,615349,625248,600019,69850,603092,70405,75373,455101,625433,631544,75521,631444,631090,598903,600387,587707,72766,452572,57847,75609,76456,623958,457120,65169,74802,607602,616256,598665,70279,445468,625294,473960,70013,69778,58295,58003,436272,596534,598576,613388,1706410,445349,625059,598765,601576,59365,59330,594412,437944,474239,598380,594837,610633,600905,70850,463423,591384,1706345,76418,612853,460846,62053,69488,473782,74191,453608,607241,606981,57869,61420,60419,587049,68311,594154,455547,471358,631589,53510,59258,593517,67491,68595,70528,72124,71058,628302,463643,57314,625470,54467,60502,440983,59623,591052,456597,73979,55539,71063,54339,74504,446870,73891,73030,471965,75840,65327,628034,472635,606641,1705780,608459,71206,74609,464113,74418,462636,73842,71019,455672,59095,596594,440628,62716,593597,598733,75845,465635,594709,59391,471692,627566,63444,602045,462318,437790,57027,67365,608708,627484,598063,447988,66803,607128,619192,67272,442439,631293,75509,76102,58273,70778,455606,71225,472127,603511,630793,593624,597993,58863,66244,72668,630817,472785,494532,71762,71024,453535,72944,69229,1708019,58527,453864,623186,627924,61544,53692,603846,61862,446274,66067,601876,599453,64200,437801,452489,628457,458547,625144,71231,72762,70657,631964,71658,460963,455502,599964,60956,599947,623155,76426,600475,4
56612,606897,615333,603205,60788,1706251,55387,470783,625699,59424,435547,57326,596693,457363,71202,62825,600035,475123,65004,602516,494608,625654,596319,587479,1708078,75058,601363,446364,625198,473748,56325,610615,594341,71854,53363,615374,608472,68037,456376,67035,60215,65331,448413,67228,53138,589938,452164,75176,589156,59720,472976,612934,462250,618136,627784,451937,66503,53282,440022,631727,439471,67083,603485,599686,625390,593271,53274,64789,603506,64517,471429,596564,73464,72407,617159,435830,624014,471210,607608,57516,627385,627481,64446,68165,607162,56491,73476,53874,53635,454860,53260,625880,606739,616319,76377,458796,435645,601372,72164,602931,63204,610360,72225,606549,55545,63529,69439,631584,73782,70748,603913,67724,449808,66866,493990,594052,607067,627976,599370,460626,606668,615803,625272,72747,596698,590950,472747,64489,58409,72135,454254,606611,435580,71348,494110,72619,450177,625943,595100,74295,465001,61744,55003,627806,631242,470489,608055,625680,494070,596398,72911,598720,611229,58011,1706314,61971,587041,463812,450782,55655,445407,67582,618466,76196,590356,464032,448727,603168,450758,69461,625781,617423,607298,55405,631456,620236,60524,435838,66115,462864,54899,66871,1707986,473854,450645,66651,603466,625602,469812,54477,460088,53076,587989,451914,625809,75473,464090,628141,65475,601465,450041,75827,57008,59861,53606,612385,72935,623891,451333,608089,608728,599906,460913,625204,625439,597573,602372,463578,594948,63178,56885,75023,610759,625096,615367,468859,600211,455740,625605,62831,596908,589309,602968,75294,625613,75264,72683,604088,64931,460190,458639,625784,1706497,598706,460470,57157,468378,625795,612448,69932,439184,67380,458701,70858,63953,626380,461840,601749,75246,607440,599139,588430,66517,59584,602027,470698,607605,494182,625401,596465,471794,605443,594358,75028,64927,627532,623914,69927,455483,610648,460756,57861,587388,453738,468870,597162,64087,70044,72807,470372,597072,605777,1705826,627990,598114,451993,623652,72506,588080,725
60,466031,611963,76220,464581,447224,630865,608421,70564,459039,606730,63741,71477,625865,71830,71946,460104,625115,631138,442569,64781,604608,494383,76078,465222,67917,627644,67197,61329,607679,631403,468881,594444,454530,625186,591365,54086,71527,607156,57035,494603,445907,603079,467462,627463,60266,53146,594557,589327,61209,72054,71405,67091,623739,70300,69155,455584,631245,598477,599027,607454,437186,631661,65531,465413,624008,66795,56373,597011,587715,64123,63590,470614,58721,54947,617048,67827,60541,445591,623819,607468,437841,602217,52984,617744,70388,473522,75745,608012,1707981,610196,594552,74711,75810,75270,630862,599008,67217,594281,442735,66267,605626,627730,588323,457257,70040,625376,64127,75709,596152,440447,602948,596913,64383,455763,594767,445027,602824,603462,74086,459258,590971,609096,597380,455890,606924,608952,606772,615655,60179,458456,68335,452506,611666,453772,56057,455346,594431,451413,598432,590503,54615,467229,61526,631594,474792,494576,494425,75259,597818,453355,623418,630806,69652,462089,74539,462350,53011,603882,472968,60777,459626,631511,594893,448496,470271,595854,75945,597106,589947,625416,461318,612331,60528,470313,607310,609572,611882,71935,587691,70926,70758,456583,627540,64744,54008,61118,466210,607173,623481],"inlineDepth":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"category":[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],"subcategory":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"func":[0,1,2,3,4,5,6,7,8,9,10,11,1
2,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,5
34,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,914,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,10
27,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,14
27,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,1642,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,18
27,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108],"nativeSymbol":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null],"innerWindowID":[null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null],"implementation":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nul
l,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nul
l,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nul
l,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nul
l,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nul
l,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null],"line":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null
,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null
,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null
,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null
,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null
,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null],"column":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nul
l,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nul
l,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nul
l,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nul
l,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nul
l,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nul
l,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null],"optimizations":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null]},"funcTable":{"length":2109,"name":[0,2,3,4,5,6,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326
,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826
,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,914,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,12
61,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,1642,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,16
61,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,20
61,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110],"isJS":[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,fal
se,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false
,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,f
alse,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,fal
se,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false
,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,f
alse,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false],"relevantForJS":[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,fal
se,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false
,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,f
alse,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,fal
se,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false
,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,f
alse,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false],"resource":[-1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"fileName":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null],"lineNumber":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,nu
ll,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null],"columnNumber":[null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,
null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null]},"markers":{"length":0,"category":[],"data":[],"endTime":[],"name":[],"phase":[],"startTime":[]},"name":"sign_44","isMainThread":true,"nativeSymbols":{"length":0,"address":[],"functionSize":[],"libIndex":[],"name":[]},"pausedRanges":[],"pid":"1785864.1","processName":"sign_44","processShutdownTime":2160796743.709309,"processStartupTime":2160792986.873682,"processType":"default","registerTime":2160792986.873682,"resourceTable":{"length":2,"lib":[1,0],"name":[1,7],"host":[null,null],"type":[1,1]},"sam
ples":{"length":3745,"stack":[0,1,2,4,6,7,9,12,14,16,17,18,20,21,22,23,25,26,27,29,30,31,32,33,35,23,36,38,39,40,41,42,44,45,47,48,49,50,52,53,54,55,56,58,59,60,61,62,64,65,66,67,68,70,72,73,74,75,54,76,78,79,80,81,82,83,84,85,86,87,89,90,91,93,94,95,96,97,99,100,102,103,105,106,107,108,109,110,111,112,114,115,116,117,119,120,121,123,124,126,127,128,129,91,131,132,133,134,135,136,137,139,140,142,143,144,145,147,148,23,149,150,151,152,154,155,23,156,157,23,158,159,161,162,163,164,166,167,168,170,171,68,172,173,175,177,49,179,180,181,182,183,184,185,186,187,188,191,192,193,194,196,197,199,200,26,201,201,202,203,22,205,91,206,207,50,208,209,211,54,212,188,213,214,215,216,217,219,220,221,222,223,224,225,226,227,54,228,229,100,230,231,39,232,233,234,235,236,237,238,239,240,243,244,245,246,247,248,249,250,50,251,252,68,254,255,159,257,258,259,260,261,144,262,263,264,265,267,221,268,269,270,271,272,273,225,274,275,276,277,278,279,281,56,282,283,284,286,221,287,288,201,289,290,291,112,292,294,295,296,297,298,299,300,302,304,305,306,307,308,310,311,221,49,313,314,315,175,316,54,317,144,314,318,320,321,322,324,325,326,294,328,330,331,332,333,334,335,336,337,339,340,22,341,342,343,344,345,346,129,348,54,349,350,351,352,354,355,356,357,359,360,362,364,365,366,368,369,370,372,373,374,375,376,377,378,379,380,252,381,383,129,221,384,272,385,387,388,182,389,151,391,392,393,394,395,396,397,398,52,399,23,401,402,404,406,407,259,342,408,409,411,413,414,415,416,417,231,418,420,421,221,422,91,423,425,426,427,354,428,76,429,430,431,432,434,23,435,436,437,159,438,439,276,440,441,442,443,444,392,259,445,221,91,446,447,448,449,450,451,452,454,455,456,68,458,91,459,175,460,461,463,464,465,467,469,470,471,473,474,23,475,343,177,477,478,479,480,392,481,482,484,426,197,486,487,488,279,489,490,491,492,494,181,495,496,497,498,499,500,164,501,188,100,503,504,505,506,129,305,76,392,188,507,509,510,512,514,516,517,519,520,521,305,23,522,523,524,525,482,526,527,528,530,531,532,533,534,535,49,536,393,
537,538,140,539,540,456,541,542,544,545,546,548,549,550,551,552,237,553,143,554,555,425,556,557,558,49,559,560,561,562,49,524,563,564,565,566,252,307,567,568,569,570,571,572,573,574,575,487,576,49,577,578,579,54,580,392,581,582,583,584,585,586,587,588,590,591,592,372,594,571,595,596,597,598,569,599,600,22,321,601,602,49,392,603,604,144,605,606,344,608,609,610,181,611,612,613,614,615,616,617,415,618,619,620,622,623,624,625,626,627,628,630,631,632,119,633,227,336,634,541,635,604,636,637,638,640,307,641,350,642,643,644,9,645,646,647,648,23,650,651,380,652,541,487,653,140,654,129,655,656,657,658,659,541,175,660,661,662,663,664,665,666,667,668,670,671,352,672,673,674,675,676,677,678,679,680,681,205,682,683,685,686,305,687,688,689,691,692,693,694,695,696,697,699,561,700,701,702,703,704,705,706,707,708,709,710,23,711,712,713,553,100,579,714,715,716,718,719,221,720,392,721,181,722,724,725,726,727,728,730,731,732,733,734,735,736,737,738,739,740,49,741,742,744,745,746,747,464,748,749,750,751,753,754,416,755,252,756,128,103,561,757,758,759,760,761,541,22,762,763,764,221,765,767,768,769,159,770,771,772,773,774,775,776,704,777,778,779,608,322,780,781,782,783,784,785,223,786,787,525,305,788,789,305,418,790,791,792,793,794,796,797,798,799,392,800,801,802,803,804,805,806,807,809,810,318,811,177,812,813,23,814,815,68,816,817,68,818,40,819,114,820,372,821,822,823,824,825,826,527,827,828,829,830,109,831,832,49,833,834,835,836,837,838,839,840,841,842,843,845,846,847,848,320,128,849,850,188,738,231,851,852,853,23,392,854,470,855,856,857,148,504,858,59,859,860,861,100,863,864,91,865,791,866,867,258,487,869,221,437,271,871,819,645,872,49,873,875,876,877,878,879,507,881,414,33,344,882,883,884,885,886,887,888,889,890,892,893,894,895,402,896,221,898,899,900,901,173,902,903,904,905,907,908,220,290,909,551,910,911,912,650,52,249,913,914,915,916,917,392,741,918,919,140,921,922,521,923,554,54,924,610,925,604,927,602,789,928,929,930,931,932,221,933,152,934,240,935,613,936,938,939,772,760,620,940,
941,943,944,945,207,946,702,947,948,949,491,950,951,828,952,214,953,956,49,957,958,602,959,960,904,961,406,962,610,963,964,965,54,927,966,967,968,636,969,970,971,972,305,973,974,975,976,132,977,978,420,481,981,982,983,828,984,985,986,144,987,988,863,826,989,990,304,991,388,992,993,994,208,995,996,997,22,79,999,549,794,987,1000,1001,41,1003,1004,1005,425,185,1006,1007,392,251,23,494,812,374,1008,109,1009,1011,1012,551,1013,1014,771,1015,1016,1017,1018,1019,1020,1021,1023,541,1024,1026,1027,1028,1029,49,1030,259,1031,1032,1033,1035,1036,1037,1038,54,1039,22,407,1040,1041,1042,1043,208,1044,1045,1046,22,470,1047,1048,1049,274,1050,364,2,373,1051,1052,1053,1054,1056,1057,345,1058,1059,1060,1062,47,1063,573,1064,1065,1066,531,1067,182,68,1068,1069,1070,1071,1072,536,896,1073,1074,541,1075,1076,1077,711,221,1078,933,76,1079,1080,1081,875,1082,489,687,1083,1084,1085,54,557,1086,1087,185,208,503,1088,1028,1089,68,220,1090,1091,1092,221,610,478,1093,1094,232,1095,1096,826,1097,1099,1100,1102,1103,1104,1105,402,1106,49,68,1107,1108,244,1110,1111,99,1112,634,1113,689,1114,1115,1116,666,158,259,640,342,1117,1118,23,392,1120,415,1121,1122,271,1123,1124,50,1125,208,1127,1128,221,1129,265,1130,1131,292,1132,1133,1134,1135,1136,484,49,1137,1138,1139,392,1140,1141,311,1142,1143,791,1144,1145,1146,252,1147,1148,1149,1151,1152,279,23,1153,1154,1155,1156,866,1157,1159,996,1160,402,1161,1162,49,1163,1006,1164,1165,1166,1167,1012,1168,1170,982,1171,1172,1173,1175,1176,1177,1178,1179,1180,1181,1182,1116,344,23,1183,1184,1185,1186,944,1187,1189,1190,1191,1192,555,1193,188,1194,1195,1196,601,1197,1198,1199,1200,1201,1202,1203,81,1204,679,1205,1206,1208,760,1210,456,1212,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,164,1225,1226,1227,1229,1230,1231,129,1232,1233,1234,1235,1236,1237,656,1239,1045,866,1240,1241,1242,991,1243,1244,1245,1246,1220,1247,1249,1250,1123,1251,884,1252,1254,611,532,279,1255,49,1256,1257,41,1258,1259,1260,23,1262,1263,1264,1265,1267,435,1268,1270,1271,541,127
2,1273,1274,1275,813,1276,1277,1278,1279,49,1280,1281,561,1283,1284,326,730,660,1285,1286,47,1287,1288,1289,507,1290,1291,188,50,1293,525,1294,1295,173,21,227,1296,1297,1298,1300,1006,1301,1302,1303,575,35,1304,1305,1306,1307,620,1308,1309,23,261,1310,620,1311,1312,1313,1314,114,1315,1316,1317,1318,1319,1320,1322,136,49,1323,300,1324,1325,221,231,1326,68,1327,185,1328,182,688,1329,1330,1331,1333,1334,550,1335,49,1336,831,991,525,1337,1338,1339,50,1023,1340,1341,938,1342,1343,1344,1345,1346,1347,359,91,1348,1349,68,1350,1351,372,1352,1353,1354,731,456,1355,576,1356,1357,1358,26,1062,1359,1360,1361,1233,1362,1363,1364,161,392,185,738,1365,590,541,1366,413,415,1123,1367,610,1368,1369,1370,850,521,1372,1373,1374,463,1375,1376,1294,23,261,1377,1378,1379,1380,1381,261,588,1383,1384,692,1385,1386,823,1387,1389,1390,202,1391,1392,1393,413,1394,1395,1396,1397,1077,802,1398,180,1045,1399,235,1400,1401,54,68,1402,1403,1404,1405,1406,1141,201,1407,784,501,1409,847,967,1410,1411,1412,1413,21,47,159,1414,1415,1021,1416,305,1417,1418,54,1419,1420,1421,1422,986,1423,507,521,1425,402,661,1426,159,1427,1428,68,1429,1430,1431,660,1432,1434,504,1435,1436,1437,1438,1440,1441,221,1171,1442,1443,660,1444,612,1000,1436,380,1149,1445,961,373,1220,1446,151,1447,1448,1449,1450,1451,1452,1315,1454,1234,858,1455,1456,1457,482,1458,334,392,267,1459,1460,1461,22,1462,1463,115,1464,1465,1467,1225,22,1468,1469,932,1471,1472,1473,144,47,1474,803,1475,1476,1477,1478,1479,572,1481,1482,378,1483,1484,243,1485,1487,54,344,990,1488,269,1489,1490,68,1491,533,604,1492,484,1493,201,1495,1496,1497,1498,1499,345,813,188,1252,1500,1501,1502,105,1503,1504,1505,22,119,1506,1507,1509,1510,1511,1512,1513,1331,1514,626,49,1516,1517,1518,1519,158,1520,984,181,41,159,1522,22,151,1523,22,392,1524,1525,802,1526,249,1527,1528,1529,1277,1530,1531,1532,1534,1535,746,1536,455,392,969,1136,1537,1538,1540,54,1542,1543,1544,1545,1546,68,1547,1548,1549,1550,525,1551,1092,1012,1552,1553,527,1554,1555,1556,1558,1559,268,1560,156
1,94,201,1563,1564,864,1565,1566,944,1567,76,1568,1570,1571,39,1572,750,1012,1573,1574,1438,23,1576,932,54,1577,1578,791,1579,1580,1581,1582,1584,1585,814,1586,1587,1588,1591,76,297,1592,1225,49,1593,1594,1595,1596,1598,755,1599,1600,1601,1602,454,692,1603,22,208,1604,330,1605,1244,1606,1607,634,784,219,1233,1608,1609,1611,40,1612,1613,602,1614,1615,1616,1617,1618,261,1619,1620,91,1621,1622,1623,1624,1625,1626,415,1627,1628,1629,1630,1631,1632,507,1633,91,571,1634,1635,1636,1637,1639,1172,1640,1471,1641,1642,144,351,634,953,1643,1225,62,818,991,1644,1646,1647,54,201,68,497,1648,1649,213,94,1650,182,54,1651,1652,1518,1653,1654,1655,1656,223,296,1657,1658,556,1659,1660,561,1661,1662,1663,1664,1665,1075,1666,1667,307,1668,1669,1670,507,223,1672,1673,1464,1674,1675,1676,1677,494,1678,1679,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,819,1692,1693,1694,1695,1696,1698,222,1700,826,764,1701,1702,1703,990,574,1704,1705,182,602,764,411,1706,692,1707,660,435,1708,133,1379,1709,812,1710,221,1711,1712,1713,1714,1715,1716,1717,1718,1719,1721,1134,372,94,1722,54,1723,1712,1724,1725,1726,49,431,1262,1727,299,1728,1113,265,392,407,1730,944,1731,289,1732,1734,1735,1736,76,977,1737,1738,208,1739,1352,1740,1427,23,1741,1742,1743,351,1606,68,320,1315,893,1744,1745,1746,1747,1748,23,261,1750,780,556,1751,23,1752,1753,227,1754,1755,1392,159,49,181,109,188,1756,1757,561,1758,140,1759,1761,1762,1763,1764,1765,1766,208,1768,478,1769,1771,602,1772,1773,22,430,1774,1775,1776,1777,1778,561,1779,245,602,221,1780,1781,525,1690,491,1782,1454,1783,1784,828,1746,1785,620,1786,1244,1787,49,83,1788,1789,1790,1791,1792,1793,987,1794,1795,185,1797,479,1202,1798,1799,1422,1800,85,1801,610,1802,1803,1277,1804,1805,1806,1807,435,692,1810,491,1811,491,1812,1308,1813,1357,1527,1814,1815,1816,274,1262,1817,1818,1819,1820,1821,437,859,261,759,1822,235,1823,1824,345,1825,1026,574,188,1580,828,541,1826,1827,1828,1829,1830,276,546,1620,1831,1832,1833,343,1834,1835,1836,1837,1838,1839,221,91,1840,1841,6
50,1582,1842,76,1844,1845,1846,1847,1848,1849,1850,1026,40,1851,1852,1853,643,1854,143,68,49,93,1134,1855,1856,1625,1857,1858,274,1859,1860,1861,1287,1862,1863,68,501,1864,1167,144,1865,1866,1659,1867,1868,586,1869,1870,820,1148,1872,375,1873,1874,1875,784,1876,944,1877,1878,1879,873,22,1880,996,1881,1440,91,819,1882,1464,1883,23,208,1884,158,1886,1887,1888,1889,49,392,1891,1,1892,1893,1894,54,1895,1896,1897,1898,1900,1873,1132,1902,1115,380,1611,1217,1903,1904,1905,29,1906,1907,188,610,1333,1854,1582,1908,1909,1910,1911,1912,602,116,1913,1914,1915,1916,967,426,1917,151,54,248,1918,1644,1919,1821,1479,22,68,1241,50,1921,300,1614,1922,623,392,1923,1924,1925,1926,85,1927,925,1929,1930,814,1931,828,193,1932,996,1006,646,49,1571,1722,1933,1231,1934,1937,221,1938,881,1939,1769,1940,22,1941,1942,1598,1943,1944,1945,272,1946,1948,1949,1950,1951,528,1952,1953,927,1954,828,1955,1580,1956,1957,1348,1850,1958,1943,1959,814,1960,1961,261,339,1962,1963,1964,1676,1823,1965,380,1966,507,763,1968,221,1970,1723,532,257,1971,1972,1973,85,68,1974,378,1975,760,1977,1978,1979,1980,22,1981,1983,1984,1985,1986,1987,1988,611,650,1989,507,927,365,1821,1118,465,68,1948,1990,1991,269,1682,1866,1992,1993,604,1994,1900,1995,1996,1997,1998,247,1623,610,1999,996,2000,1084,2001,364,925,704,2002,2003,1183,679,1141,1263,1551,2005,2006,833,2007,2008,1353,2009,49,340,683,2010,594,1632,2012,392,2013,2014,2015,2016,2017,2018,158,2019,1397,119,393,2020,2021,2022,1419,22,2023,2024,380,604,2025,144,2026,2027,2028,2029,1722,261,1497,2030,36,2031,22,2032,2033,2034,2036,826,2037,1014,1423,29,692,2038,1080,1833,2039,1835,2040,2042,2043,2044,2045,1887,1746,76,2046,833,2047,254,2048,1427,2049,2050,2051,2052,231,2053,23,316,2054,49,2055,1093,2056,1333,1844,135,2057,315,819,2058,2059,2060,610,2062,2063,1691,2064,357,2065,2066,2067,819,2068,2069,68,2070,2071,2072,1743,2073,2074,91,1099,2075,50,2076,2077,2078,2079,392,2080,1114,2082,2083,2084,2085,814,2086,1147,392,1814,610,305,925,1180,2087,2088,94,434,523,757,2089
,2090,234,2092,2093,794,628,2094,2095,1452,2096,164,2097,2098,2099,2100,601,828,2101,2102,232,1379,2103,2104,819,1400,2106,1315,1990,1829,2107,22,1450,2108,2109,576,2110,2111,2112,2113,1829,305,81,240,2114,2034,1961,2115,39,123,2116,164,2117,2118,307,2119,2120,731,2121,1183,953,1235,2122,2123,2124,52,2125,2126,2127,2128,2129,881,2130,392,2131,395,1577,180,287,2132,1032,2133,2134,355,2136,2137,2138,2139,604,2140,665,1197,2141,2143,221,1134,2144,2145,525,2146,208,2147,2148,201,2149,2036,2150,2128,299,896,1157,2151,682,727,1580,2152,2153,1580,2155,2156,2157,2158,803,2159,2160,828,2161,1873,2107,2162,2163,2164,2165,2166,2167,375,322,1023,343,1531,1074,1972,2168,261,2169,380,604,2170,1694,2171,2172,201,1519,2173,1343,2174,2175,2176,2177,1875,164,819,1542,2178,430,2179,254,1360,2180,2181,184,1220,2182,2183,315,1866,2034,541,2184,588,615,2185,2186,305,1208,1023,634,2187,2188,22,22,85,977,2189,2190,2191,1869,828,2192,2193,2194,2195,2196,650,881,448,575,725,2197,1746,2198,2199,221,2200,2201,919,2202,147,2203,2204,1481,2205,1460,2206,2207,601,1742,2208,2029,2209,2210,305,724,2211,1844,1835,496,2212,2071,2213,1594,2137,1315,2214,2215,91,2216,2217,2218,2219,2220,2221,568,720,497,610,2222,2223,585,2224,2225,1710,2226,2227,1237,2228,2229,2230,2231,2232,135,2145,2233,378,2235,2236,1450,620,1026,152,2237,2238,1244,1459,2239,2240,2241,2242,2243,884,151,2244,577,152,2245,2246,2247,427,2221,2249,2250,2251,1265,221,2252,22,1331,2253,2254,478,2255,602,54,2256,2257,2258,507,2260,1850,1505,2261,22,2262,660,2263,734,2264,2265,2266,1882,2267,152,430,2269,2270,231,2271,2272,2273,2149,68,2274,1431,2276,299,1124,2277,1075,2278,2279,2280,604,2281,2282,824,417,2284,2285,2286,2287,520,2288,2289,1602,159,54,2290,243,2291,2292,87,2293,2294,2069,697,2296,334,2297,970,2298,2299,372,2121,2264,1243,143,602,910,2300,431,1206,1454,214,2301,2302,2303,2304,1327,763,91,2305,2306,654,2307,1333,567,1930,2308,2309,456,221,837,1693,2310,2312,527,720,2313,392,1114,2314,2315,213,2316,2317,846,2318,2319,435,925,86
3,2069,2320,2321,2322,798,2323,2324,201,221,541,2326,2327,2233,208,2328,2329,2330,23,1948,305,2316,1065,332,1139,2331,2333,54,159,2334,1313,859,718,2335,1020,401,755,455,2336,2258,2337,1855,794,91,2338,243,1835,49,2339,144,1520,2341,624,76,2342,2159,1166,2343,307,2344,2345,458,991,2346,484,2347,1362,2348,392,274,354,2349,1134,2350,115,2351,2352,2199,49,2353,2354,1189,2355,718,847,932,2356,585,2235,1757,2357,2358,2159,2359,2360,2361,2362,1012,494,2363,221,1549,2364,2365,1981,188,1260,2366,983,2367,155,2368,2369,243,2370,2371,523,2372,2373,2374,2375,243,2376,2377,416,2378,602,2379,2381,54,41,1017,221,151,1235,2382,2383,1507,52,1406,2349,2384,1505,2385,1267,99,2386,2387,2388,1746,1252,2389,120,2390,2391,850,2379,2392,2393,2394,2395,2396,2397,494,1598,2398,2399,2401,2402,525,2404,351,2405,456,2406,814,315,2407,2408,2409,2410,2411,2412,1293,2413,2414,2415,2416,23,1463,2417,760,983,2418,187,2419,52,2420,1825,2421,2422,68,2423,2424,2425,2426,2428,2429,2430,2431,2169,435,2432,91,2433,221,50,2434,392,2435,2436,2437,2438,2439,2440,2441,255,900,2442,2443,423,1762,2445,2446,144,2447,279,261,2448,688,2449,2451,21,2264,1686,2453,2390,430,2454,2455,2237,544,847,1373,2457,924,2458,2459,884,68,1463,494,1666,2460,1257,2461,94,2462,91,344,306,646,2463,354,109,2464,2465,2466,305,2467,143,22,945,2468,50,2469,2470,727,2471,2473,2474,380,1432,2475,159,349,2476,2477,2478,718,1829,2479,94,2480,287,392,398,2481,416,2482,2483,91,2485,2486,692,2489,768,2490,331,2491,812,1630,2492,1518,2493,2495,2496,164,368,1460,1744,1975,221,2497,2498,2499,2072,1232,2501,1245,910,2502,2503,2504,851,68,851,756,2505,2506,1267,2508,1443,2008,2509,322,2510,2511,604,2512,2513,553,1630,1603,2514,2515,334,2516,2517,1315,514,793,2250,498,2518,2519,1303,1821,1139,2520,2521,83,2522,2523,1263,1892,2524,1835,1114,401,1137,2346,561,345,541,2525,2526,2374,915,2527,2528,585,2529,2530,68,497,1179,499,2531,2532,2533,171,2534,2535,2536,2538,803,456,2317,68,835,2539,704,378,557,2541,2520,2542,2543,2544,2545,2546,620,948,647,254
7,1463,2548,1723,2549,602,2550,201,2551,2552,2553,2554,2555,601,2556,295,2176,49,692,181,1829,2557,2558,1419,1380,2559,2560,2561,1402,1141,1016,2562,1779,1490,2563,2564,243,290,2565,182,794,2566,39,180,838,2567,2568,2569,2570,68,2571,2573,549,111,2574,1883,2575,2130,232,1412,2518,2576,295,2577,484,2483,2578,2579,2580,837,1911,2581,2582,2583,2584,561,388,2586,2587,2588,2589,2590,2591,875,223,1477,1123,1850,1490,68,2592,2593,2239,2594,76,2595,2596,2597,50,2598,2215,375,2084],"time":[2160792986.933803,2160792988.519015,2160792992.400195,2160792995.762403,2160792998.815276,2160793001.377188,2160793003.553313,2160793005.566374,2160793007.331471,2160793008.988195,2160793010.482035,2160793011.902194,2160793013.265872,2160793014.537918,2160793015.767643,2160793016.966748,2160793018.114341,2160793019.238254,2160793020.346377,2160793021.425549,2160793022.49159,2160793023.547812,2160793024.598263,2160793025.634135,2160793026.663836,2160793027.689057,2160793028.713018,2160793029.729148,2160793030.742219,2160793031.75321,2160793032.76503,2160793033.771941,2160793034.777401,2160793035.782042,2160793036.788363,2160793037.791223,2160793038.793284,2160793039.795044,2160793040.799435,2160793041.799795,2160793042.800365,2160793043.800826,2160793044.804216,2160793045.804547,2160793046.804817,2160793047.804248,2160793048.806958,2160793049.806639,2160793050.80608,2160793051.80567,2160793052.807831,2160793053.807281,2160793054.806622,2160793055.805922,2160793056.808142,2160793057.807573,2160793058.806803,2160793059.806044,2160793060.808254,2160793061.807615,2160793062.807105,2160793063.806375,2160793064.736444,2160793065.5216,2160793066.314327,2160793067.134133,2160793067.980781,2160793068.745986,2160793069.346009,2160793069.969091,2160793070.644045,2160793071.37006,2160793072.146236,2160793072.970733,2160793073.82491,2160793074.707168,2160793075.603206,2160793076.514925,2160793077.448064,2160793078.397774,2160793079.350673,2160793080.311093,2160793081.285643,2160793082.259932,2160793083.
238192,2160793084.224593,2160793085.209713,2160793086.196523,2160793087.196983,2160793088.194484,2160793089.191344,2160793090.193835,2160793091.197075,2160793092.195515,2160793093.192406,2160793094.189766,2160793095.187287,2160793096.186317,2160793097.189888,2160793098.188638,2160793099.186609,2160793100.18628,2160793101.18467,2160793102.18307,2160793103.181681,2160793104.187781,2160793105.186842,2160793106.185162,2160793107.183613,2160793108.183813,2160793109.182823,2160793110.181584,2160793111.180494,2160793112.180825,2160793113.180045,2160793114.179236,2160793115.178546,2160793116.184476,2160793117.188817,2160793118.187377,2160793119.185748,2160793120.198899,2160793121.201729,2160793122.203509,2160793123.20463,2160793124.20209,2160793125.198041,2160793126.194121,2160793127.190651,2160793128.194232,2160793129.191792,2160793130.188923,2160793131.186673,2160793132.188763,2160793133.186994,2160793134.185044,2160793135.188605,2160793136.188365,2160793137.186326,2160793138.184656,2160793139.182936,2160793140.182977,2160793141.181727,2160793142.180588,2160793143.179568,2160793144.179939,2160793145.179239,2160793146.17848,2160793147.189541,2160793148.196271,2160793149.200002,2160793150.201722,2160793151.203473,2160793152.209303,2160793153.205074,2160793154.200134,2160793155.195764,2160793156.193185,2160793157.191005,2160793158.188296,2160793159.185986,2160793160.185536,2160793161.183977,2160793162.182477,2160793163.186468,2160793164.186548,2160793165.184919,2160793166.195069,2160793167.19966,2160793168.20423,2160793169.205361,2160793170.206501,2160793171.207502,2160793172.216172,2160793173.217543,2160793174.217553,2160793175.217454,2160793176.217244,2160793177.216434,2160793178.210695,2160793179.216645,2160793180.218626,2160793181.218606,2160793182.217027,2160793183.216147,2160793184.211337,2160793185.205898,2160793186.200768,2160793187.196318,2160793188.199039,2160793189.196279,2160793190.192809,2160793191.20152,2160793192.206161,2160793193.208371,2160793194.215292,2160
793195.215383,2160793196.216473,2160793197.210724,2160793198.205164,2160793199.200044,2160793200.196795,2160793201.193405,2160793202.190455,2160793203.187906,2160793204.198717,2160793205.202767,2160793206.205718,2160793207.206638,2160793208.204008,2160793209.199639,2160793210.195939,2160793211.192479,2160793212.1907,2160793213.18837,2160793214.186271,2160793215.184481,2160793216.184151,2160793217.194722,2160793218.199623,2160793219.203223,2160793220.206114,2160793221.207404,2160793222.203634,2160793223.209975,2160793224.213165,2160793225.214266,2160793226.213706,2160793227.208257,2160793228.204207,2160793229.199677,2160793230.195718,2160793231.192288,2160793232.195639,2160793233.193249,2160793234.190479,2160793235.18802,2160793236.19319,2160793237.191481,2160793238.189411,2160793239.187251,2160793240.186722,2160793241.185442,2160793242.183693,2160793243.182324,2160793244.182784,2160793245.181815,2160793246.180915,2160793247.180426,2160793248.181346,2160793249.180736,2160793250.180057,2160793251.184777,2160793252.185748,2160793253.184488,2160793254.183159,2160793255.182289,2160793256.1827,2160793257.18176,2160793258.192661,2160793259.197961,2160793260.203302,2160793261.211442,2160793262.213593,2160793263.214823,2160793264.215844,2160793265.210594,2160793266.204964,2160793267.200305,2160793268.197355,2160793269.193965,2160793270.202806,2160793271.206656,2160793272.210197,2160793273.210567,2160793274.211078,2160793275.206318,2160793276.214499,2160793277.216049,2160793278.21654,2160793279.2169,2160793280.217041,2160793281.211521,2160793282.205671,2160793283.200842,2160793284.197682,2160793285.194292,2160793286.203073,2160793287.206863,2160793288.210364,2160793289.217275,2160793290.218505,2160793291.218886,2160793292.219017,2160793293.213257,2160793294.207197,2160793295.213678,2160793296.216328,2160793297.217069,2160793298.217439,2160793299.22302,2160793300.22314,2160793301.216901,2160793302.210641,2160793303.204841,2160793304.201382,2160793305.197462,2160793306.194112,2
160793307.191273,2160793308.201803,2160793309.205674,2160793310.208114,2160793311.215435,2160793312.223665,2160793313.222686,2160793314.227926,2160793315.227327,2160793316.227377,2160793317.225098,2160793318.223018,2160793319.216148,2160793320.210989,2160793321.205909,2160793322.201169,2160793323.19707,2160793324.19482,2160793325.1921,2160793326.189671,2160793327.199311,2160793328.204842,2160793329.207652,2160793330.215163,2160793331.215723,2160793332.211864,2160793333.218405,2160793334.219625,2160793335.220046,2160793336.226977,2160793337.232127,2160793338.229738,2160793339.222098,2160793340.215998,2160793341.209829,2160793342.204459,2160793343.199799,2160793344.19707,2160793345.19403,2160793346.203111,2160793347.206831,2160793348.210692,2160793349.217872,2160793350.217953,2160793351.212793,2160793352.220363,2160793353.221294,2160793354.221174,2160793355.226285,2160793356.232585,2160793357.231536,2160793358.228376,2160793359.226017,2160793360.219957,2160793361.213217,2160793362.219068,2160793363.219948,2160793364.221309,2160793365.220069,2160793366.213959,2160793367.20799,2160793368.20405,2160793369.20006,2160793370.196391,2160793371.205011,2160793372.209602,2160793373.211812,2160793374.218743,2160793375.218863,2160793376.219914,2160793377.225994,2160793378.226075,2160793379.225435,2160793380.226036,2160793381.223996,2160793382.217267,2160793383.210797,2160793384.218198,2160793385.219489,2160793386.219779,2160793387.218569,2160793388.21423,2160793389.22103,2160793390.220681,2160793391.214791,2160793392.210141,2160793393.205402,2160793394.201452,2160793395.197122,2160793396.206863,2160793397.210014,2160793398.211974,2160793399.212354,2160793400.214265,2160793401.209325,2160793402.204286,2160793403.200096,2160793404.197446,2160793405.194437,2160793406.203517,2160793407.207248,2160793408.214098,2160793409.214679,2160793410.209709,2160793411.20467,2160793412.21325,2160793413.215281,2160793414.216271,2160793415.215731,2160793416.211892,2160793417.206642,2160793418.21385
3,2160793419.215653,2160793420.217924,2160793421.218584,2160793422.217735,2160793423.217375,2160793424.213005,2160793425.207926,2160793426.203056,2160793427.198836,2160793428.196767,2160793429.194027,2160793430.191538,2160793431.189379,2160793432.188779,2160793433.1874,2160793434.19792,2160793435.202731,2160793436.207601,2160793437.215452,2160793438.222762,2160793439.222483,2160793440.217843,2160793441.212103,2160793442.206334,2160793443.201614,2160793444.210825,2160793445.213465,2160793446.214956,2160793447.215176,2160793448.211737,2160793449.206717,2160793450.202457,2160793451.198628,2160793452.196738,2160793453.205729,2160793454.209399,2160793455.21192,2160793456.21417,2160793457.20946,2160793458.204551,2160793459.212001,2160793460.215702,2160793461.217052,2160793462.217993,2160793463.217473,2160793464.213543,2160793465.208314,2160793466.203594,2160793467.199684,2160793468.197615,2160793469.206655,2160793470.210276,2160793471.212726,2160793472.221577,2160793473.222628,2160793474.222708,2160793475.221378,2160793476.216919,2160793477.211109,2160793478.21765,2160793479.219151,2160793480.221081,2160793481.220242,2160793482.214782,2160793483.209292,2160793484.205523,2160793485.213253,2160793486.215734,2160793487.217304,2160793488.218455,2160793489.218465,2160793490.213046,2160793491.207616,2160793492.204166,2160793493.200347,2160793494.208747,2160793495.211898,2160793496.215238,2160793497.222019,2160793498.221799,2160793499.216069,2160793500.21175,2160793501.2068,2160793502.202441,2160793503.198681,2160793504.214442,2160793505.217642,2160793506.218943,2160793507.225103,2160793508.225764,2160793509.219334,2160793510.213154,2160793511.207474,2160793512.203805,2160793513.200375,2160793514.197116,2160793515.194296,2160793516.205027,2160793517.208857,2160793518.211388,2160793519.218688,2160793520.223399,2160793521.228789,2160793522.22739,2160793523.22069,2160793524.21543,2160793525.209771,2160793526.204851,2160793527.200582,2160793528.198223,2160793529.207163,2160793530.21
0764,2160793531.213214,2160793532.221945,2160793533.221815,2160793534.215865,2160793535.210286,2160793536.206366,2160793537.202206,2160793538.210317,2160793539.213198,2160793540.219318,2160793541.220609,2160793542.221209,2160793543.221599,2160793544.22323,2160793545.22207,2160793546.215981,2160793547.210091,2160793548.211421,2160793549.207332,2160793550.202872,2160793551.199392,2160793552.197153,2160793553.194563,2160793554.203944,2160793555.208004,2160793556.212115,2160793557.219415,2160793558.219756,2160793559.214436,2160793560.210577,2160793561.205867,2160793562.201637,2160793563.209698,2160793564.213888,2160793565.215879,2160793566.217229,2160793567.21864,2160793568.22083,2160793569.221451,2160793570.220531,2160793571.220152,2160793572.215722,2160793573.210602,2160793574.205703,2160793575.201464,2160793576.202094,2160793577.199305,2160793578.196465,2160793579.205696,2160793580.210806,2160793581.213467,2160793582.214137,2160793583.209977,2160793584.206708,2160793585.214518,2160793586.216989,2160793587.218489,2160793588.22177,2160793589.22756,2160793590.226641,2160793591.220321,2160793592.221051,2160793593.224062,2160793594.223312,2160793595.222793,2160793596.224643,2160793597.226634,2160793598.227104,2160793599.226795,2160793600.228125,2160793601.227426,2160793602.226786,2160793603.226236,2160793604.232287,2160793605.230387,2160793606.223098,2160793607.216038,2160793608.222878,2160793609.223679,2160793610.223669,2160793611.22878,2160793612.22987,2160793613.229001,2160793614.228411,2160793615.227502,2160793616.228012,2160793617.225982,2160793618.224613,2160793619.218143,2160793620.224914,2160793621.225384,2160793622.225045,2160793623.229936,2160793624.229516,2160793625.222477,2160793626.227307,2160793627.227208,2160793628.227748,2160793629.227128,2160793630.231859,2160793631.229909,2160793632.22398,2160793633.22898,2160793634.228941,2160793635.228241,2160793636.234082,2160793637.238492,2160793638.242163,2160793639.238703,2160793640.231313,2160793641.223584,2160793
642.216464,2160793643.210354,2160793644.206595,2160793645.214315,2160793646.216816,2160793647.218766,2160793648.221257,2160793649.227207,2160793650.232868,2160793651.237868,2160793652.243569,2160793653.239989,2160793654.231119,2160793655.23464,2160793656.23477,2160793657.233211,2160793658.230331,2160793659.228341,2160793660.222832,2160793661.216652,2160793662.210892,2160793663.206003,2160793664.211503,2160793665.208014,2160793666.203974,2160793667.212025,2160793668.216176,2160793669.217856,2160793670.219047,2160793671.218787,2160793672.220098,2160793673.214838,2160793674.221179,2160793675.222419,2160793676.2241,2160793677.22311,2160793678.2175,2160793679.211841,2160793680.222741,2160793681.224262,2160793682.224562,2160793683.223393,2160793684.219013,2160793685.213213,2160793686.208264,2160793687.214064,2160793688.217315,2160793689.217555,2160793690.212605,2160793691.207636,2160793692.204526,2160793693.212777,2160793694.215687,2160793695.217558,2160793696.219248,2160793697.219499,2160793698.214279,2160793699.220759,2160793700.22341,2160793701.22395,2160793702.229411,2160793703.228241,2160793704.223292,2160793705.228732,2160793706.228993,2160793707.228563,2160793708.228073,2160793709.221534,2160793710.215064,2160793711.209434,2160793712.217785,2160793713.219836,2160793714.220856,2160793715.220397,2160793716.221558,2160793717.216138,2160793718.210898,2160793719.206049,2160793720.203029,2160793721.199799,2160793722.197,2160793723.19455,2160793724.205341,2160793725.209411,2160793726.212212,2160793727.213262,2160793728.218933,2160793729.214393,2160793730.209253,2160793731.216424,2160793732.219785,2160793733.220905,2160793734.226936,2160793735.226246,2160793736.221366,2160793737.227087,2160793738.227607,2160793739.227408,2160793740.227158,2160793741.226079,2160793742.219709,2160793743.225349,2160793744.23015,2160793745.23017,2160793746.228231,2160793747.226891,2160793748.221721,2160793749.215942,2160793750.210492,2160793751.205772,2160793752.202953,2160793753.211553,216079
3754.214844,2160793755.217064,2160793756.218865,2160793757.214025,2160793758.208946,2160793759.216256,2160793760.219677,2160793761.221147,2160793762.220778,2160793763.215509,2160793764.211529,2160793765.207409,2160793766.20351,2160793767.20014,2160793768.210121,2160793769.213561,2160793770.215782,2160793771.217592,2160793772.225733,2160793773.225403,2160793774.219344,2160793775.225084,2160793776.227075,2160793777.227195,2160793778.225685,2160793779.219466,2160793780.214626,2160793781.221387,2160793782.222967,2160793783.223747,2160793784.224278,2160793785.218508,2160793786.212689,2160793787.219349,2160793788.22218,2160793789.22303,2160793790.22235,2160793791.222101,2160793792.217861,2160793793.212522,2160793794.219622,2160793795.221453,2160793796.223693,2160793797.223124,2160793798.217554,2160793799.212064,2160793800.220185,2160793801.222005,2160793802.222826,2160793803.228726,2160793804.229187,2160793805.222767,2160793806.228148,2160793807.228488,2160793808.229368,2160793809.229389,2160793810.22913,2160793811.22936,2160793812.230101,2160793813.230411,2160793814.229552,2160793815.230022,2160793816.235653,2160793817.235773,2160793818.233724,2160793819.237844,2160793820.236965,2160793821.229375,2160793822.233795,2160793823.233296,2160793824.233816,2160793825.231787,2160793826.230047,2160793827.235018,2160793828.236468,2160793829.235518,2160793830.234049,2160793831.238139,2160793832.24412,2160793833.24233,2160793834.239901,2160793835.238301,2160793836.237742,2160793837.236092,2160793838.239852,2160793839.238383,2160793840.238133,2160793841.237134,2160793842.235594,2160793843.234134,2160793844.239905,2160793845.239455,2160793846.237246,2160793847.240996,2160793848.240797,2160793849.238787,2160793850.237268,2160793851.235658,2160793852.235788,2160793853.239909,2160793854.239049,2160793855.23739,2160793856.23733,2160793857.236191,2160793858.234831,2160793859.233682,2160793860.239403,2160793861.238213,2160793862.236474,2160793863.235164,2160793864.235294,2160793865.234065,2
160793866.238215,2160793867.237276,2160793868.236986,2160793869.235907,2160793870.234477,2160793871.233227,2160793872.238908,2160793873.243128,2160793874.246379,2160793875.244349,2160793876.2434,2160793877.24079,2160793878.238461,2160793879.241731,2160793880.241251,2160793881.239182,2160793882.237142,2160793883.235383,2160793884.235413,2160793885.234203,2160793886.233074,2160793887.232104,2160793888.237855,2160793889.235585,2160793890.228056,2160793891.232656,2160793892.233847,2160793893.233007,2160793894.232157,2160793895.230028,2160793896.224868,2160793897.218568,2160793898.224839,2160793899.226069,2160793900.22795,2160793901.23351,2160793902.232191,2160793903.225541,2160793904.232402,2160793905.232472,2160793906.231773,2160793907.236334,2160793908.237034,2160793909.235735,2160793910.239355,2160793911.238156,2160793912.237796,2160793913.236276,2160793914.234827,2160793915.233587,2160793916.233738,2160793917.233208,2160793918.232389,2160793919.231589,2160793920.23741,2160793921.23667,2160793922.23522,2160793923.234201,2160793924.234501,2160793925.233602,2160793926.237882,2160793927.242363,2160793928.242163,2160793929.240373,2160793930.238324,2160793931.236484,2160793932.241415,2160793933.245315,2160793934.248296,2160793935.245816,2160793936.244207,2160793937.241627,2160793938.239227,2160793939.242178,2160793940.241628,2160793941.239659,2160793942.237659,2160793943.236149,2160793944.23583,2160793945.23464,2160793946.233561,2160793947.232611,2160793948.233022,2160793949.232402,2160793950.231702,2160793951.231113,2160793952.231753,2160793953.231314,2160793954.229465,2160793955.223015,2160793956.223556,2160793957.226926,2160793958.228017,2160793959.228407,2160793960.232988,2160793961.238148,2160793962.242759,2160793963.240129,2160793964.24543,2160793965.24346,2160793966.24084,2160793967.243451,2160793968.242661,2160793969.240382,2160793970.238242,2160793971.236352,2160793972.236003,2160793973.234863,2160793974.233654,2160793975.231284,2160793976.225754,2160793977.23109
5,2160793978.231425,2160793979.231156,2160793980.232016,2160793981.231547,2160793982.231007,2160793983.230827,2160793984.231818,2160793985.231348,2160793986.239669,2160793987.239329,2160793988.23936,2160793989.23802,2160793990.236621,2160793991.235221,2160793992.240832,2160793993.244912,2160793994.243092,2160793995.240793,2160793996.239913,2160793997.238084,2160793998.236374,2160793999.233624,2160794000.230755,2160794001.224035,2160794002.229576,2160794003.230127,2160794004.231277,2160794005.229698,2160794006.223458,2160794007.217208,2160794008.225089,2160794009.226389,2160794010.22686,2160794011.2259,2160794012.221351,2160794013.215811,2160794014.210821,2160794015.206522,2160794016.204092,2160794017.201262,2160794018.210473,2160794019.214364,2160794020.218364,2160794021.219145,2160794022.219985,2160794023.215395,2160794024.223676,2160794025.225346,2160794026.225987,2160794027.231687,2160794028.232008,2160794029.225818,2160794030.231049,2160794031.231309,2160794032.23211,2160794033.23698,2160794034.23519,2160794035.228151,2160794036.234381,2160794037.234202,2160794038.233292,2160794039.232603,2160794040.231793,2160794041.225453,2160794042.218644,2160794043.224764,2160794044.227475,2160794045.227965,2160794046.233446,2160794047.232627,2160794048.230367,2160794049.235798,2160794050.235698,2160794051.234949,2160794052.240749,2160794053.2396,2160794054.23784,2160794055.23664,2160794056.236811,2160794057.235631,2160794058.234472,2160794059.238792,2160794060.244723,2160794061.243093,2160794062.241034,2160794063.238904,2160794064.238494,2160794065.242295,2160794066.239535,2160794067.231706,2160794068.237266,2160794069.236596,2160794070.235247,2160794071.239377,2160794072.244828,2160794073.248358,2160794074.246259,2160794075.243399,2160794076.24206,2160794077.24513,2160794078.24172,2160794079.233361,2160794080.226631,2160794081.231792,2160794082.232042,2160794083.231612,2160794084.237753,2160794085.235763,2160794086.228374,2160794087.233024,2160794088.234205,2160794089.2334
85,2160794090.237926,2160794091.237176,2160794092.237136,2160794093.234507,2160794094.227278,2160794095.220328,2160794096.227249,2160794097.228169,2160794098.22823,2160794099.23347,2160794100.233421,2160794101.226911,2160794102.220131,2160794103.225922,2160794104.228182,2160794105.228483,2160794106.227333,2160794107.221363,2160794108.216784,2160794109.223644,2160794110.225315,2160794111.226205,2160794112.233356,2160794113.232266,2160794114.225847,2160794115.230987,2160794116.232408,2160794117.232028,2160794118.236759,2160794119.234979,2160794120.229629,2160794121.22285,2160794122.22849,2160794123.229251,2160794124.230511,2160794125.235762,2160794126.234332,2160794127.227472,2160794128.233813,2160794129.233833,2160794130.233064,2160794131.231044,2160794132.230924,2160794133.224555,2160794134.229915,2160794135.230346,2160794136.231346,2160794137.230067,2160794138.223697,2160794139.217527,2160794140.225128,2160794141.226579,2160794142.227049,2160794143.22737,2160794144.2277,2160794145.227121,2160794146.221221,2160794147.227132,2160794148.229222,2160794149.229433,2160794150.228193,2160794151.227363,2160794152.222684,2160794153.228904,2160794154.229835,2160794155.229965,2160794156.230096,2160794157.223916,2160794158.217806,2160794159.224107,2160794160.226717,2160794161.227418,2160794162.227758,2160794163.228099,2160794164.229579,2160794165.22972,2160794166.22852,2160794167.22248,2160794168.217831,2160794169.224871,2160794170.226522,2160794171.227322,2160794172.234513,2160794173.239883,2160794174.239084,2160794175.236444,2160794176.230324,2160794177.223415,2160794178.229015,2160794179.229756,2160794180.231006,2160794181.235887,2160794182.235697,2160794183.234907,2160794184.235338,2160794185.234528,2160794186.233879,2160794187.233139,2160794188.23249,2160794189.22599,2160794190.219591,2160794191.225701,2160794192.228342,2160794193.228832,2160794194.234353,2160794195.239534,2160794196.240284,2160794197.239264,2160794198.237835,2160794199.236535,2160794200.241916,2160794201.
246146,2160794202.244497,2160794203.242237,2160794204.241767,2160794205.239988,2160794206.238338,2160794207.242159,2160794208.240869,2160794209.233179,2160794210.23739,2160794211.23678,2160794212.236911,2160794213.241281,2160794214.245372,2160794215.243882,2160794216.243503,2160794217.241553,2160794218.239833,2160794219.243454,2160794220.242024,2160794221.234215,2160794222.238375,2160794223.237716,2160794224.237876,2160794225.236836,2160794226.250227,2160794227.260788,2160794228.264268,2160794229.265139,2160794230.265729,2160794231.2596,2160794232.25028,2160794233.24051,2160794234.23191,2160794235.236431,2160794236.237581,2160794237.236972,2160794238.236533,2160794239.235963,2160794240.236684,2160794241.235244,2160794242.233745,2160794243.227435,2160794244.222815,2160794245.229406,2160794246.230836,2160794247.231807,2160794248.232387,2160794249.231798,2160794250.225908,2160794251.231998,2160794252.234379,2160794253.234439,2160794254.23306,2160794255.23219,2160794256.227691,2160794257.233621,2160794258.234442,2160794259.234582,2160794260.241383,2160794261.246183,2160794262.244963,2160794263.243494,2160794264.243274,2160794265.241725,2160794266.240225,2160794267.244746,2160794268.243666,2160794269.235976,2160794270.240197,2160794271.239667,2160794272.239788,2160794273.244158,2160794274.248519,2160794275.247039,2160794276.24664,2160794277.24467,2160794278.24273,2160794279.246361,2160794280.251481,2160794281.249462,2160794282.247142,2160794283.244753,2160794284.244193,2160794285.242554,2160794286.246344,2160794287.243765,2160794288.237265,2160794289.241726,2160794290.241236,2160794291.240187,2160794292.245747,2160794293.243378,2160794294.235578,2160794295.239908,2160794296.240659,2160794297.239759,2160794298.23756,2160794299.23072,2160794300.22555,2160794301.231841,2160794302.233091,2160794303.233522,2160794304.240342,2160794305.238923,2160794306.237273,2160794307.230493,2160794308.225454,2160794309.220004,2160794310.215244,2160794311.222785,2160794312.226496,2160794313
.228076,2160794314.234477,2160794315.235407,2160794316.236707,2160794317.235378,2160794318.229028,2160794319.222859,2160794320.230449,2160794321.23181,2160794322.23232,2160794323.237951,2160794324.244681,2160794325.243922,2160794326.242342,2160794327.239592,2160794328.233843,2160794329.227173,2160794330.232893,2160794331.233774,2160794332.235164,2160794333.234085,2160794334.233276,2160794335.227316,2160794336.234417,2160794337.235187,2160794338.235128,2160794339.240308,2160794340.240109,2160794341.233529,2160794342.23842,2160794343.23836,2160794344.23892,2160794345.237111,2160794346.235781,2160794347.229402,2160794348.224422,2160794349.231132,2160794350.232603,2160794351.233273,2160794352.254294,2160794353.254305,2160794354.251725,2160794355.254496,2160794356.253496,2160794357.250566,2160794358.246517,2160794359.238157,2160794360.231517,2160794361.236848,2160794362.237268,2160794363.237069,2160794364.243309,2160794365.24272,2160794366.24151,2160794367.24018,2160794368.240591,2160794369.239761,2160794370.238832,2160794371.236832,2160794372.231422,2160794373.225153,2160794374.231333,2160794375.232554,2160794376.234254,2160794377.239765,2160794378.245135,2160794379.244556,2160794380.244826,2160794381.243367,2160794382.242097,2160794383.241108,2160794384.241259,2160794385.240359,2160794386.23946,2160794387.23867,2160794388.2392,2160794389.238631,2160794390.238301,2160794391.237782,2160794392.238502,2160794393.243023,2160794394.242513,2160794395.241423,2160794396.240424,2160794397.233454,2160794398.226794,2160794399.232545,2160794400.234625,2160794401.234946,2160794402.240356,2160794403.240427,2160794404.241047,2160794405.240308,2160794406.239478,2160794407.238729,2160794408.239359,2160794409.238799,2160794410.23824,2160794411.23776,2160794412.238861,2160794413.238491,2160794414.237962,2160794415.242892,2160794416.249013,2160794417.253073,2160794418.251414,2160794419.248904,2160794420.247874,2160794421.246235,2160794422.244395,2160794423.242826,2160794424.242996,21607944
25.241866,2160794426.240767,2160794427.245117,2160794428.244328,2160794429.236978,2160794430.229689,2160794431.23528,2160794432.23697,2160794433.237021,2160794434.242171,2160794435.242072,2160794436.242542,2160794437.240452,2160794438.233533,2160794439.226883,2160794440.242224,2160794441.243664,2160794442.242595,2160794443.246945,2160794444.246926,2160794445.239266,2160794446.243466,2160794447.243157,2160794448.243317,2160794449.242238,2160794450.246538,2160794451.244429,2160794452.238489,2160794453.231729,2160794454.225419,2160794455.22,2160794456.22853,2160794457.230631,2160794458.231691,2160794459.232572,2160794460.234792,2160794461.235183,2160794462.234163,2160794463.228643,2160794464.224564,2160794465.231404,2160794466.233115,2160794467.234005,2160794468.241196,2160794469.241276,2160794470.240467,2160794471.239667,2160794472.240578,2160794473.239988,2160794474.239308,2160794475.238699,2160794476.239449,2160794477.23897,2160794478.243751,2160794479.243261,2160794480.243612,2160794481.242522,2160794482.241463,2160794483.240523,2160794484.241284,2160794485.240614,2160794486.239824,2160794487.244445,2160794488.243885,2160794489.236696,2160794490.241306,2160794491.241077,2160794492.241557,2160794493.240857,2160794494.245428,2160794495.244788,2160794496.248019,2160794497.246799,2160794498.24527,2160794499.24381,2160794500.249161,2160794501.247911,2160794502.246021,2160794503.244622,2160794504.244392,2160794505.243163,2160794506.247323,2160794507.251614,2160794508.251314,2160794509.249435,2160794510.247345,2160794511.245445,2160794512.245066,2160794513.249206,2160794514.246677,2160794515.238977,2160794516.244727,2160794517.244028,2160794518.242858,2160794519.246979,2160794520.247239,2160794521.2457,2160794522.24295,2160794523.23559,2160794524.230051,2160794525.235911,2160794526.236852,2160794527.237103,2160794528.243754,2160794529.243544,2160794530.242424,2160794531.241365,2160794532.241685,2160794533.240966,2160794534.240216,2160794535.238376,2160794536.238327,216079
4537.232007,2160794538.237408,2160794539.237878,2160794540.244969,2160794541.243469,2160794542.24172,2160794543.23981,2160794544.24623,2160794545.245601,2160794546.244101,2160794547.241482,2160794548.235522,2160794549.228912,2160794550.234743,2160794551.235753,2160794552.237194,2160794553.237304,2160794554.237255,2160794555.237195,2160794556.238456,2160794557.238296,2160794558.238056,2160794559.237837,2160794560.238917,2160794561.238718,2160794562.238538,2160794563.237029,2160794564.232319,2160794565.226299,2160794566.23265,2160794567.23411,2160794568.236041,2160794569.241421,2160794570.241602,2160794571.241162,2160794572.247173,2160794573.246383,2160794574.244894,2160794575.243515,2160794576.243855,2160794577.242886,2160794578.241986,2160794579.246426,2160794580.246917,2160794581.245727,2160794582.244508,2160794583.243288,2160794584.243399,2160794585.247499,2160794586.246629,2160794587.24525,2160794588.24522,2160794589.244041,2160794590.242931,2160794591.247202,2160794592.250622,2160794593.249143,2160794594.247443,2160794595.246123,2160794596.245904,2160794597.244644,2160794598.248705,2160794599.252915,2160794600.252576,2160794601.250336,2160794602.248196,2160794603.246317,2160794604.245947,2160794605.244698,2160794606.243488,2160794607.242778,2160794608.243159,2160794609.242399,2160794610.24161,2160794611.24108,2160794612.241631,2160794613.240011,2160794614.233471,2160794615.227122,2160794616.222842,2160794617.230133,2160794618.232303,2160794619.233613,2160794620.235814,2160794621.236365,2160794622.236645,2160794623.235766,2160794624.231196,2160794625.225597,2160794626.232237,2160794627.234098,2160794628.236138,2160794629.237019,2160794630.237339,2160794631.23758,2160794632.24428,2160794633.243011,2160794634.236091,2160794635.229371,2160794636.236432,2160794637.237452,2160794638.237593,2160794639.236483,2160794640.231783,2160794641.226064,2160794642.232914,2160794643.234335,2160794644.236625,2160794645.242436,2160794646.241516,2160794647.240637,2160794648.247397,2
160794649.246808,2160794650.245448,2160794651.249438,2160794652.248599,2160794653.240869,2160794654.24509,2160794655.24455,2160794656.244951,2160794657.244031,2160794658.243111,2160794659.242302,2160794660.243442,2160794661.248093,2160794662.246193,2160794663.238973,2160794664.245204,2160794665.244844,2160794666.243795,2160794667.243085,2160794668.243496,2160794669.242726,2160794670.247197,2160794671.251798,2160794672.251748,2160794673.249799,2160794674.247829,2160794675.24616,2160794676.24615,2160794677.245331,2160794678.244241,2160794679.243271,2160794680.248932,2160794681.248032,2160794682.246423,2160794683.243783,2160794684.237763,2160794685.231114,2160794686.236854,2160794687.237785,2160794688.239205,2160794689.244596,2160794690.249836,2160794691.254347,2160794692.254677,2160794693.252458,2160794694.250208,2160794695.253478,2160794696.258019,2160794697.255769,2160794698.25295,2160794699.25077,2160794700.249851,2160794701.248171,2160794702.251921,2160794703.255812,2160794704.255192,2160794705.253123,2160794706.250833,2160794707.248794,2160794708.253534,2160794709.252074,2160794710.249965,2160794711.253015,2160794712.252636,2160794713.250606,2160794714.254037,2160794715.252397,2160794716.251508,2160794717.249648,2160794718.248229,2160794719.246679,2160794720.24661,2160794721.24559,2160794722.244531,2160794723.243661,2160794724.242922,2160794725.241682,2160794726.235142,2160794727.240513,2160794728.242223,2160794729.242064,2160794730.247014,2160794731.245535,2160794732.239835,2160794733.244906,2160794734.244926,2160794735.244346,2160794736.250227,2160794737.254707,2160794738.251968,2160794739.243908,2160794740.249199,2160794741.248549,2160794742.247059,2160794743.24452,2160794744.24392,2160794745.237191,2160794746.242201,2160794747.242402,2160794748.243212,2160794749.248093,2160794750.246363,2160794751.239283,2160794752.233934,2160794753.228084,2160794754.222934,2160794755.230125,2160794756.233705,2160794757.234976,2160794758.236056,2160794759.242047,2160794760.24
3967,2160794761.243678,2160794762.241948,2160794763.235818,2160794764.231119,2160794765.237479,2160794766.23888,2160794767.239501,2160794768.246452,2160794769.251632,2160794770.250743,2160794771.249183,2160794772.248993,2160794773.247644,2160794774.245234,2160794775.238124,2160794776.232545,2160794777.238645,2160794778.239786,2160794779.240156,2160794780.241617,2160794781.246857,2160794782.246648,2160794783.245778,2160794784.246158,2160794785.245299,2160794786.244439,2160794787.24397,2160794788.24456,2160794789.243961,2160794790.242211,2160794791.235671,2160794792.230562,2160794793.237092,2160794794.238403,2160794795.238993,2160794796.245954,2160794797.245924,2160794798.245014,2160794799.244115,2160794800.244625,2160794801.244016,2160794802.243316,2160794803.243047,2160794804.243777,2160794805.243347,2160794806.248088,2160794807.247778,2160794808.247859,2160794809.24709,2160794810.24602,2160794811.245031,2160794812.250702,2160794813.249832,2160794814.248202,2160794815.247103,2160794816.247083,2160794817.245994,2160794818.243804,2160794819.236944,2160794820.231575,2160794821.226145,2160794822.232995,2160794823.235026,2160794824.237316,2160794825.238357,2160794826.238807,2160794827.239168,2160794828.245618,2160794829.245619,2160794830.244809,2160794831.24931,2160794832.25516,2160794833.253671,2160794834.251771,2160794835.249771,2160794836.249252,2160794837.247872,2160794838.246573,2160794839.245573,2160794840.245864,2160794841.245394,2160794842.244884,2160794843.243915,2160794844.250035,2160794845.254596,2160794846.258156,2160794847.256757,2160794848.255627,2160794849.253178,2160794850.256268,2160794851.259639,2160794852.259049,2160794853.256499,2160794854.25384,2160794855.25149,2160794856.256151,2160794857.254401,2160794858.252162,2160794859.255423,2160794860.260333,2160794861.258004,2160794862.254914,2160794863.252574,2160794864.251855,2160794865.250255,2160794866.249506,2160794867.247426,2160794868.247917,2160794869.247237,2160794870.246227,2160794871.245328,216079
4872.251468,2160794873.255809,2160794874.259489,2160794875.25764,2160794876.25649,2160794877.253971,2160794878.256631,2160794879.254631,2160794880.254802,2160794881.257982,2160794882.254633,2160794883.251523,2160794884.245003,2160794885.249214,2160794886.248534,2160794887.247375,2160794888.246695,2160794889.244966,2160794890.238066,2160794891.243076,2160794892.244767,2160794893.244317,2160794894.249028,2160794895.253848,2160794896.254319,2160794897.252649,2160794898.25079,2160794899.24906,2160794900.25435,2160794901.253031,2160794902.251061,2160794903.249272,2160794904.249192,2160794905.248053,2160794906.246803,2160794907.244614,2160794908.247125,2160794909.246695,2160794910.246305,2160794911.245626,2160794912.246526,2160794913.245727,2160794914.244917,2160794915.244198,2160794916.250138,2160794917.248319,2160794918.241159,2160794919.245459,2160794920.24662,2160794921.24599,2160794922.245081,2160794923.244301,2160794924.245092,2160794925.244582,2160794926.249292,2160794927.247603,2160794928.241733,2160794929.246604,2160794930.246484,2160794931.245825,2160794932.246665,2160794933.245925,2160794934.245106,2160794935.249366,2160794936.249967,2160794937.248947,2160794938.247628,2160794939.246488,2160794940.246768,2160794941.246039,2160794942.245239,2160794943.24455,2160794944.24521,2160794945.249991,2160794946.254391,2160794947.253272,2160794948.253132,2160794949.251422,2160794950.249713,2160794951.253483,2160794952.253424,2160794953.251714,2160794954.250425,2160794955.248936,2160794956.248806,2160794957.253037,2160794958.257277,2160794959.261018,2160794960.260188,2160794961.257389,2160794962.254719,2160794963.257699,2160794964.25692,2160794965.25463,2160794966.252491,2160794967.250651,2160794968.250281,2160794969.249002,2160794970.246792,2160794971.239753,2160794972.234273,2160794973.240413,2160794974.241554,2160794975.241944,2160794976.243705,2160794977.243695,2160794978.243476,2160794979.248616,2160794980.249657,2160794981.248707,2160794982.247957,2160794983.247018,2
160794984.247388,2160794985.246699,2160794986.251349,2160794987.24957,2160794988.24371,2160794989.248461,2160794990.248351,2160794991.247641,2160794992.247052,2160794993.240352,2160794994.233702,2160794995.239563,2160794996.242113,2160794997.242474,2160794998.247884,2160794999.253335,2160795000.254015,2160795001.252886,2160795002.251347,2160795003.250117,2160795004.255378,2160795005.254248,2160795006.252509,2160795007.250849,2160795008.25068,2160795009.24955,2160795010.24845,2160795011.247841,2160795012.248291,2160795013.247572,2160795014.246862,2160795015.246233,2160795016.246913,2160795017.251744,2160795018.250054,2160795019.243054,2160795020.249065,2160795021.248875,2160795022.247976,2160795023.252396,2160795024.258157,2160795025.256597,2160795026.254377,2160795027.252618,2160795028.252098,2160795029.250699,2160795030.249669,2160795031.24856,2160795032.24882,2160795033.24829,2160795034.247431,2160795035.246821,2160795036.252632,2160795037.257192,2160795038.255753,2160795039.254063,2160795040.253444,2160795041.251824,2160795042.255824,2160795043.254205,2160795044.253955,2160795045.257526,2160795046.255936,2160795047.254177,2160795048.253707,2160795049.252338,2160795050.250778,2160795051.249409,2160795052.25518,2160795053.25286,2160795054.24527,2160795055.249311,2160795056.250181,2160795057.249132,2160795058.253392,2160795059.251283,2160795060.250613,2160795061.255093,2160795062.253994,2160795063.252284,2160795064.257615,2160795065.254815,2160795066.246715,2160795067.238916,2160795068.245626,2160795069.245757,2160795070.245317,2160795071.250158,2160795072.256278,2160795073.253759,2160795074.257479,2160795075.25574,2160795076.25481,2160795077.25786,2160795078.256021,2160795079.253871,2160795080.253202,2160795081.251532,2160795082.249962,2160795083.248943,2160795084.249153,2160795085.248214,2160795086.247254,2160795087.246405,2160795088.246915,2160795089.246345,2160795090.250956,2160795091.249246,2160795092.243377,2160795093.248197,2160795094.248108,2160795095.247368
,2160795096.253189,2160795097.257279,2160795098.25573,2160795099.253961,2160795100.253241,2160795101.251702,2160795102.255392,2160795103.254342,2160795104.253713,2160795105.252033,2160795106.250484,2160795107.249064,2160795108.249125,2160795109.253425,2160795110.251335,2160795111.243876,2160795112.249586,2160795113.249207,2160795114.248107,2160795115.252398,2160795116.258028,2160795117.256529,2160795118.254459,2160795119.252339,2160795120.25166,2160795121.25552,2160795122.252961,2160795123.245161,2160795124.250571,2160795125.249922,2160795126.248632,2160795127.252723,2160795128.251663,2160795129.244083,2160795130.236664,2160795131.241834,2160795132.243505,2160795133.243635,2160795134.243716,2160795135.243566,2160795136.244597,2160795137.249707,2160795138.249417,2160795139.248468,2160795140.247678,2160795141.241089,2160795142.234259,2160795143.239919,2160795144.24206,2160795145.242381,2160795146.242811,2160795147.242862,2160795148.244063,2160795149.249273,2160795150.248204,2160795151.241224,2160795152.235784,2160795153.241725,2160795154.242735,2160795155.243046,2160795156.244406,2160795157.244306,2160795158.244127,2160795159.243907,2160795160.244948,2160795161.244708,2160795162.243319,2160795163.237049,2160795164.232189,2160795165.2387,2160795166.24036,2160795167.241121,2160795168.251181,2160795169.250122,2160795170.242942,2160795171.235932,2160795172.242623,2160795173.243373,2160795174.243294,2160795175.248384,2160795176.249455,2160795177.248525,2160795178.247496,2160795179.246576,2160795180.247016,2160795181.251647,2160795182.251097,2160795183.249878,2160795184.250238,2160795185.249209,2160795186.247969,2160795187.247289,2160795188.24764,2160795189.24689,2160795190.246121,2160795191.245411,2160795192.246102,2160795193.245622,2160795194.250053,2160795195.250164,2160795196.251994,2160795197.254805,2160795198.252435,2160795199.244965,2160795200.250796,2160795201.250206,2160795202.248987,2160795203.246697,2160795204.241128,2160795205.234588,2160795206.240358,2160795207
.241399,2160795208.243149,2160795209.24851,2160795210.25374,2160795211.251911,2160795212.246031,2160795213.250432,2160795214.250152,2160795215.249172,2160795216.254943,2160795217.258784,2160795218.257004,2160795219.260044,2160795220.259245,2160795221.256725,2160795222.254356,2160795223.252266,2160795224.251756,2160795225.250677,2160795226.249357,2160795227.248208,2160795228.248528,2160795229.247658,2160795230.246829,2160795231.251319,2160795232.25186,2160795233.25064,2160795234.249421,2160795235.248271,2160795236.248812,2160795237.247982,2160795238.247172,2160795239.246503,2160795240.253183,2160795241.252984,2160795242.251375,2160795243.249965,2160795244.250996,2160795245.249666,2160795246.248547,2160795247.252847,2160795248.266158,2160795249.263308,2160795250.259419,2160795251.251319,2160795252.254909,2160795253.25339,2160795254.25192,2160795255.250331,2160795256.250501,2160795257.254602,2160795258.252312,2160795259.244682,2160795260.250483,2160795261.249893,2160795262.248654,2160795263.247504,2160795264.250975,2160795265.250085,2160795266.248865,2160795267.247766,2160795268.248306,2160795269.252337,2160795270.251387,2160795271.249978,2160795272.250228,2160795273.249038,2160795274.247769,2160795275.246689,2160795276.25227,2160795277.25171,2160795278.250091,2160795279.248891,2160795280.248971,2160795281.247882,2160795282.252082,2160795283.251133,2160795284.250923,2160795285.249834,2160795286.248464,2160795287.247234,2160795288.252735,2160795289.257016,2160795290.254186,2160795291.257697,2160795292.257038,2160795293.254698,2160795294.252558,2160795295.255789,2160795296.260599,2160795297.2585,2160795298.2546,2160795299.251401,2160795300.244631,2160795301.249031,2160795302.248642,2160795303.247692,2160795304.253313,2160795305.252253,2160795306.250534,2160795307.247854,2160795308.241694,2160795309.234995,2160795310.240585,2160795311.241476,2160795312.243056,2160795313.248497,2160795314.248267,2160795315.247457,2160795316.247918,2160795317.246048,2160795318.239218,216079
5319.232669,2160795320.239779,2160795321.2409,2160795322.24115,2160795323.240231,2160795324.235531,2160795325.229851,2160795326.236352,2160795327.237962,2160795328.243053,2160795329.249013,2160795330.247874,2160795331.241094,2160795332.247345,2160795333.247255,2160795334.246396,2160795335.245606,2160795336.246116,2160795337.245507,2160795338.244818,2160795339.244258,2160795340.244999,2160795341.244599,2160795342.24302,2160795343.23658,2160795344.23158,2160795345.238001,2160795346.239651,2160795347.240322,2160795348.242042,2160795349.242193,2160795350.242183,2160795351.247394,2160795352.248614,2160795353.247755,2160795354.247085,2160795355.246225,2160795356.246686,2160795357.245996,2160795358.250607,2160795359.250017,2160795360.250048,2160795361.249178,2160795362.248189,2160795363.247109,2160795364.247739,2160795365.24701,2160795366.24616,2160795367.250751,2160795368.251381,2160795369.250132,2160795370.248772,2160795371.247622,2160795372.247843,2160795373.252013,2160795374.251234,2160795375.249914,2160795376.249925,2160795377.249135,2160795378.248175,2160795379.247216,2160795380.252876,2160795381.257237,2160795382.255657,2160795383.253868,2160795384.253168,2160795385.251489,2160795386.25528,2160795387.25377,2160795388.253171,2160795389.251471,2160795390.249872,2160795391.248492,2160795392.248562,2160795393.247643,2160795394.246653,2160795395.246404,2160795396.246884,2160795397.246304,2160795398.245655,2160795399.245385,2160795400.246116,2160795401.245636,2160795402.250367,2160795403.248817,2160795404.243297,2160795405.248088,2160795406.247968,2160795407.247269,2160795408.2607,2160795409.25393,2160795410.24561,2160795411.250061,2160795412.250721,2160795413.249621,2160795414.253742,2160795415.252742,2160795416.252443,2160795417.256233,2160795418.254824,2160795419.252924,2160795420.252465,2160795421.249835,2160795422.242705,2160795423.235625,2160795424.242286,2160795425.243017,2160795426.242997,2160795427.248327,2160795428.256778,2160795429.255898,2160795430.252889,2160
795431.245099,2160795432.238879,2160795433.24416,2160795434.244701,2160795435.244502,2160795436.250802,2160795437.250293,2160795438.248993,2160795439.247784,2160795440.246924,2160795441.240084,2160795442.233685,2160795443.239015,2160795444.241646,2160795445.242076,2160795446.247537,2160795447.252927,2160795448.254048,2160795449.252768,2160795450.251208,2160795451.249749,2160795452.255239,2160795453.25401,2160795454.25217,2160795455.25075,2160795456.253851,2160795457.252651,2160795458.256382,2160795459.260282,2160795460.265253,2160795461.262543,2160795462.259554,2160795463.256624,2160795464.255494,2160795465.258475,2160795466.256625,2160795467.254446,2160795468.252906,2160795469.245156,2160795470.237637,2160795471.242667,2160795472.244308,2160795473.244298,2160795474.244399,2160795475.244209,2160795476.245399,2160795477.25018,2160795478.24983,2160795479.248861,2160795480.249191,2160795481.247212,2160795482.245642,2160795483.238992,2160795484.245373,2160795485.245674,2160795486.245254,2160795487.250005,2160795488.249756,2160795489.242676,2160795490.247196,2160795491.247057,2160795492.247537,2160795493.252328,2160795494.251618,2160795495.250359,2160795496.250719,2160795497.249629,2160795498.24855,2160795499.2465,2160795500.246171,2160795501.239651,2160795502.233151,2160795503.238972,2160795504.241432,2160795505.241943,2160795506.247463,2160795507.246664,2160795508.241464,2160795509.246875,2160795510.247105,2160795511.246745,2160795512.246536,2160795513.240086,2160795514.233656,2160795515.239627,2160795516.242008,2160795517.242568,2160795518.248119,2160795519.248349,2160795520.252119,2160795521.25141,2160795522.24923,2160795523.242201,2160795524.236751,2160795525.242861,2160795526.243902,2160795527.244182,2160795528.245583,2160795529.245563,2160795530.245324,2160795531.250414,2160795532.251405,2160795533.250495,2160795534.249426,2160795535.248457,2160795536.248937,2160795537.248588,2160795538.247968,2160795539.247298,2160795540.253379,2160795541.252639,2160795542.25142,
2160795543.25008,2160795544.250231,2160795545.249351,2160795546.248451,2160795547.248002,2160795548.248852,2160795549.248233,2160795550.252843,2160795551.252234,2160795552.252324,2160795553.251405,2160795554.250275,2160795555.249215,2160795556.254846,2160795557.253916,2160795558.252427,2160795559.251247,2160795560.251168,2160795561.250158,2160795562.249118,2160795563.253229,2160795564.259689,2160795565.25836,2160795566.25656,2160795567.254621,2160795568.254151,2160795569.257902,2160795570.256442,2160795571.254502,2160795572.254293,2160795573.252773,2160795574.251374,2160795575.254994,2160795576.255095,2160795577.253535,2160795578.251975,2160795579.250606,2160795580.250636,2160795581.254997,2160795582.252968,2160795583.245508,2160795584.239579,2160795585.245109,2160795586.24577,2160795587.24579,2160795588.252281,2160795589.251851,2160795590.250641,2160795591.254912,2160795592.255112,2160795593.253563,2160795594.252313,2160795595.250944,2160795596.250974,2160795597.255315,2160795598.253235,2160795599.245785,2160795600.251396,2160795601.250936,2160795602.249837,2160795603.248787,2160795604.254408,2160795605.258788,2160795606.257188,2160795607.254149,2160795608.247479,2160795609.240109,2160795610.24516,2160795611.2456,2160795612.246571,2160795613.251601,2160795614.251232,2160795615.250332,2160795616.250623,2160795617.249733,2160795618.248753,2160795619.246904,2160795620.241444,2160795621.235184,2160795622.241115,2160795623.242475,2160795624.244166,2160795625.244456,2160795626.249847,2160795627.248857,2160795628.243448,2160795629.248608,2160795630.248799,2160795631.24839,2160795632.25445,2160795633.253751,2160795634.252321,2160795635.251312,2160795636.251372,2160795637.250453,2160795638.254813,2160795639.253933,2160795640.253974,2160795641.252654,2160795642.251715,2160795643.250295,2160795644.250836,2160795645.250306,2160795646.249526,2160795647.248747,2160795648.255037,2160795649.254228,2160795650.252748,2160795651.251689,2160795652.252029,2160795653.250929,2160795654.2
5517,2160795655.25429,2160795656.254621,2160795657.253461,2160795658.252132,2160795659.250932,2160795660.256683,2160795661.260813,2160795662.259064,2160795663.257134,2160795664.256634,2160795665.254725,2160795666.253025,2160795667.256836,2160795668.255756,2160795669.248026,2160795670.240417,2160795671.233827,2160795672.229417,2160795673.236648,2160795674.238888,2160795675.240299,2160795676.242989,2160795677.24358,2160795678.243881,2160795679.249431,2160795680.259162,2160795681.258323,2160795682.261603,2160795683.258613,2160795684.251544,2160795685.243784,2160795686.248445,2160795687.248525,2160795688.249245,2160795689.254256,2160795690.258996,2160795691.257847,2160795692.257367,2160795693.255578,2160795694.253918,2160795695.252499,2160795696.252429,2160795697.251449,2160795698.2505,2160795699.24961,2160795700.250151,2160795701.248511,2160795702.241871,2160795703.235472,2160795704.242672,2160795705.244063,2160795706.244383,2160795707.244574,2160795708.251264,2160795709.256475,2160795710.255575,2160795711.254045,2160795712.256846,2160795713.255566,2160795714.254227,2160795715.252807,2160795716.252718,2160795717.256488,2160795718.255439,2160795719.253799,2160795720.258889,2160795721.2576,2160795722.25551,2160795723.253911,2160795724.253511,2160795725.252182,2160795726.250952,2160795727.249893,2160795728.250194,2160795729.249434,2160795730.248954,2160795731.248255,2160795732.248895,2160795733.253626,2160795734.253076,2160795735.251877,2160795736.252047,2160795737.250977,2160795738.250068,2160795739.254068,2160795740.254449,2160795741.253139,2160795742.25206,2160795743.25088,2160795744.251031,2160795745.255451,2160795746.254601,2160795747.253192,2160795748.253102,2160795749.252163,2160795750.251053,2160795751.250004,2160795752.250284,2160795753.249534,2160795754.248935,2160795755.248555,2160795756.249276,2160795757.248726,2160795758.253407,2160795759.258167,2160795760.263568,2160795761.261848,2160795762.259148,2160795763.256679,2160795764.261039,2160795765.2645,216079576
6.26711,2160795767.264331,2160795768.262441,2160795769.259641,2160795770.257292,2160795771.260392,2160795772.259753,2160795773.257543,2160795774.255444,2160795775.253645,2160795776.253325,2160795777.251096,2160795778.243936,2160795779.237056,2160795780.243827,2160795781.244787,2160795782.244888,2160795783.250238,2160795784.251469,2160795785.250709,2160795786.248759,2160795787.24211,2160795788.23669,2160795789.23128,2160795790.238011,2160795791.239931,2160795792.242292,2160795793.248322,2160795794.253723,2160795795.253473,2160795796.258964,2160795797.257634,2160795798.255655,2160795799.252815,2160795800.246745,2160795801.239826,2160795802.245156,2160795803.246027,2160795804.247267,2160795805.252508,2160795806.251258,2160795807.244478,2160795808.239029,2160795809.244839,2160795810.24584,2160795811.24609,2160795812.252761,2160795813.257821,2160795814.255692,2160795815.259632,2160795816.259453,2160795817.257443,2160795818.255493,2160795819.253924,2160795820.253654,2160795821.252825,2160795822.251686,2160795823.250696,2160795824.256327,2160795825.255487,2160795826.253888,2160795827.252448,2160795828.252409,2160795829.251429,2160795830.250459,2160795831.24995,2160795832.25049,2160795833.249841,2160795834.254151,2160795835.253542,2160795836.253872,2160795837.257973,2160795838.256823,2160795839.255183,2160795840.255184,2160795841.253834,2160795842.252755,2160795843.256545,2160795844.257046,2160795845.255486,2160795846.254216,2160795847.253067,2160795848.253167,2160795849.252168,2160795850.256458,2160795851.255599,2160795852.256589,2160795853.255679,2160795854.25381,2160795855.25367,2160795856.258721,2160795857.263461,2160795858.266332,2160795859.264422,2160795860.263273,2160795861.260643,2160795862.258503,2160795863.256414,2160795864.256074,2160795865.259565,2160795866.258435,2160795867.256345,2160795868.256716,2160795869.255107,2160795870.253597,2160795871.252568,2160795872.253088,2160795873.252069,2160795874.251099,2160795875.25524,2160795876.25616,2160795877.254741,21607
95878.258631,2160795879.257752,2160795880.257742,2160795881.261292,2160795882.259653,2160795883.257633,2160795884.257724,2160795885.256134,2160795886.254484,2160795887.258375,2160795888.258675,2160795889.256896,2160795890.255716,2160795891.254007,2160795892.254167,2160795893.253047,2160795894.257658,2160795895.257338,2160795896.256929,2160795897.256239,2160795898.25434,2160795899.25304,2160795900.253681,2160795901.252891,2160795902.251871,2160795903.251022,2160795904.251772,2160795905.251143,2160795906.255713,2160795907.255134,2160795908.255464,2160795909.254504,2160795910.253305,2160795911.252215,2160795912.252716,2160795913.251886,2160795914.251067,2160795915.250357,2160795916.251227,2160795917.250738,2160795918.250109,2160795919.254869,2160795920.26084,2160795921.259521,2160795922.257761,2160795923.255941,2160795924.255722,2160795925.259672,2160795926.263673,2160795927.261953,2160795928.261454,2160795929.259244,2160795930.257184,2160795931.260635,2160795932.260605,2160795933.258616,2160795934.256846,2160795935.260407,2160795936.259267,2160795937.251337,2160795938.255198,2160795939.254498,2160795940.254759,2160795941.259039,2160795942.25806,2160795943.2565,2160795944.25684,2160795945.255461,2160795946.254181,2160795947.258332,2160795948.258772,2160795949.257193,2160795950.256083,2160795951.254683,2160795952.254944,2160795953.253934,2160795954.253245,2160795955.252415,2160795956.253076,2160795957.252376,2160795958.251666,2160795959.251017,2160795960.252237,2160795961.251718,2160795962.251128,2160795963.250609,2160795964.251549,2160795965.2511,2160795966.25076,2160795967.250311,2160795968.251372,2160795969.250952,2160795970.250502,2160795971.250093,2160795972.256273,2160795973.260734,2160795974.259514,2160795975.257655,2160795976.257835,2160795977.256246,2160795978.254776,2160795979.258806,2160795980.259337,2160795981.257687,2160795982.256308,2160795983.255028,2160795984.254959,2160795985.259229,2160795986.2635,2160795987.26723,2160795988.266491,2160795989.263671,21
60795990.260991,2160795991.263672,2160795992.262952,2160795993.260723,2160795994.258613,2160795995.262063,2160795996.262004,2160795997.259934,2160795998.258305,2160795999.257295,2160796000.257016,2160796001.254696,2160796002.252796,2160796003.245917,2160796004.252097,2160796005.252238,2160796006.251668,2160796007.256349,2160796008.256029,2160796009.254159,2160796010.2586,2160796011.25771,2160796012.257531,2160796013.261502,2160796014.265462,2160796015.263843,2160796016.262813,2160796017.260654,2160796018.258584,2160796019.256845,2160796020.256545,2160796021.255345,2160796022.254206,2160796023.253556,2160796024.254017,2160796025.253277,2160796026.251488,2160796027.250148,2160796028.245008,2160796029.250469,2160796030.251059,2160796031.25097,2160796032.25748,2160796033.256991,2160796034.255741,2160796035.254541,2160796036.255052,2160796037.254202,2160796038.253333,2160796039.257803,2160796040.258404,2160796041.257044,2160796042.255715,2160796043.254895,2160796044.255045,2160796045.254156,2160796046.253306,2160796047.252847,2160796048.254107,2160796049.253497,2160796050.252828,2160796051.257558,2160796052.258519,2160796053.257249,2160796054.25622,2160796055.25506,2160796056.255491,2160796057.254621,2160796058.259021,2160796059.263532,2160796060.263712,2160796061.261933,2160796062.259884,2160796063.258084,2160796064.263175,2160796065.261775,2160796066.259796,2160796067.258006,2160796068.257697,2160796069.256447,2160796070.260498,2160796071.264768,2160796072.264478,2160796073.261199,2160796074.253119,2160796075.245399,2160796076.23994,2160796077.24619,2160796078.247581,2160796079.248261,2160796080.250272,2160796081.250592,2160796082.250533,2160796083.255733,2160796084.257194,2160796085.256354,2160796086.260634,2160796087.259755,2160796088.259475,2160796089.258416,2160796090.257056,2160796091.255837,2160796092.261347,2160796093.259277,2160796094.251618,2160796095.255818,2160796096.256589,2160796097.255799,2160796098.25486,2160796099.25929,2160796100.25872,2160796101.25142
1,2160796102.255771,2160796103.255462,2160796104.255942,2160796105.260443,2160796106.259683,2160796107.258393,2160796108.258734,2160796109.257565,2160796110.256425,2160796111.260706,2160796112.261357,2160796113.259967,2160796114.258487,2160796115.257188,2160796116.257308,2160796117.256399,2160796118.255489,2160796119.254719,2160796120.25528,2160796121.26,2160796122.259391,2160796123.258181,2160796124.261362,2160796125.260232,2160796126.258803,2160796127.257773,2160796128.257893,2160796129.256884,2160796130.261334,2160796131.260455,2160796132.260225,2160796133.259186,2160796134.257816,2160796135.256636,2160796136.256847,2160796137.255987,2160796138.255138,2160796139.254408,2160796140.259979,2160796141.259239,2160796142.257879,2160796143.26193,2160796144.26212,2160796145.260501,2160796146.259351,2160796147.257922,2160796148.257982,2160796149.262233,2160796150.266553,2160796151.265044,2160796152.264544,2160796153.262504,2160796154.260545,2160796155.264095,2160796156.268866,2160796157.266826,2160796158.264247,2160796159.261898,2160796160.261108,2160796161.259509,2160796162.263539,2160796163.262299,2160796164.26178,2160796165.26018,2160796166.258701,2160796167.257381,2160796168.257822,2160796169.256912,2160796170.255982,2160796171.254123,2160796172.248693,2160796173.242383,2160796174.248264,2160796175.249454,2160796176.251455,2160796177.256985,2160796178.262276,2160796179.267167,2160796180.266027,2160796181.269477,2160796182.267448,2160796183.264958,2160796184.262929,2160796185.255079,2160796186.247469,2160796187.25236,2160796188.25393,2160796189.253871,2160796190.258961,2160796191.264042,2160796192.264392,2160796193.262772,2160796194.261093,2160796195.259793,2160796196.259754,2160796197.258704,2160796198.257715,2160796199.257215,2160796200.257665,2160796201.257026,2160796202.256306,2160796203.255727,2160796204.256437,2160796205.255998,2160796206.260749,2160796207.265249,2160796208.26546,2160796209.26365,2160796210.261901,2160796211.260461,2160796212.260641,2160796213.25
9572,2160796214.258512,2160796215.257913,2160796216.258333,2160796217.257664,2160796218.256914,2160796219.256254,2160796220.256965,2160796221.256495,2160796222.261216,2160796223.260766,2160796224.260987,2160796225.260157,2160796226.259088,2160796227.258048,2160796228.263918,2160796229.263339,2160796230.261829,2160796231.26068,2160796232.26071,2160796233.259551,2160796234.263811,2160796235.268172,2160796236.268032,2160796237.266212,2160796238.264163,2160796239.262293,2160796240.267524,2160796241.270984,2160796242.268915,2160796243.266445,2160796244.272126,2160796245.270136,2160796246.267656,2160796247.270657,2160796248.270317,2160796249.267798,2160796250.265388,2160796251.263308,2160796252.263009,2160796253.261829,2160796254.26049,2160796255.25926,2160796256.258731,2160796257.251781,2160796258.245002,2160796259.250592,2160796260.253153,2160796261.253513,2160796262.253624,2160796263.258974,2160796264.265865,2160796265.264865,2160796266.268076,2160796267.266446,2160796268.265657,2160796269.264137,2160796270.262467,2160796271.260948,2160796272.265808,2160796273.264629,2160796274.262829,2160796275.26149,2160796276.26129,2160796277.26023,2160796278.259411,2160796279.258451,2160796280.258802,2160796281.263412,2160796282.267993,2160796283.271653,2160796284.271004,2160796285.268734,2160796286.266264,2160796287.264035,2160796288.268705,2160796289.272376,2160796290.270166,2160796291.267837,2160796292.266717,2160796293.264817,2160796294.263148,2160796295.261508,2160796296.261319,2160796297.265199,2160796298.26411,2160796299.26255,2160796300.267641,2160796301.266191,2160796302.264101,2160796303.262232,2160796304.262152,2160796305.260873,2160796306.259633,2160796307.258564,2160796308.258875,2160796309.258195,2160796310.262696,2160796311.262026,2160796312.261996,2160796313.261117,2160796314.259917,2160796315.258898,2160796316.258218,2160796317.256708,2160796318.250019,2160796319.255029,2160796320.25661,2160796321.25639,2160796322.261311,2160796323.266191,2160796324.266502,21607963
25.264742,2160796326.263033,2160796327.261453,2160796328.261333,2160796329.259274,2160796330.252294,2160796331.245554,2160796332.252405,2160796333.253465,2160796334.253696,2160796335.259026,2160796336.260337,2160796337.259527,2160796338.258658,2160796339.257868,2160796340.258428,2160796341.256889,2160796342.250409,2160796343.243939,2160796344.23956,2160796345.24657,2160796346.248681,2160796347.249951,2160796348.257442,2160796349.263142,2160796350.267923,2160796351.266973,2160796352.266394,2160796353.264504,2160796354.268015,2160796355.266446,2160796356.265676,2160796357.264207,2160796358.262647,2160796359.261108,2160796360.266008,2160796361.264819,2160796362.263059,2160796363.261779,2160796364.26163,2160796365.26049,2160796366.259381,2160796367.263711,2160796368.281792,2160796369.279052,2160796370.280933,2160796371.277513,2160796372.275103,2160796373.276644,2160796374.273594,2160796375.270405,2160796376.268785,2160796377.266395,2160796378.264256,2160796379.267686,2160796380.272617,2160796381.269457,2160796382.272298,2160796383.269938,2160796384.268499,2160796385.266179,2160796386.264079,2160796387.26227,2160796388.26196,2160796389.259791,2160796390.257931,2160796391.251241,2160796392.257302,2160796393.257522,2160796394.256963,2160796395.255473,2160796396.250213,2160796397.244114,2160796398.250124,2160796399.251445,2160796400.253255,2160796401.258896,2160796402.264017,2160796403.263437,2160796404.263498,2160796405.262128,2160796406.260769,2160796407.259739,2160796408.25903,2160796409.25751,2160796410.25581,2160796411.260821,2160796412.261701,2160796413.260802,2160796414.265002,2160796415.263073,2160796416.256853,2160796417.261294,2160796418.260974,2160796419.260124,2160796420.265855,2160796421.263855,2160796422.256266,2160796423.260376,2160796424.261337,2160796425.260507,2160796426.259597,2160796427.257838,2160796428.257678,2160796429.251218,2160796430.256259,2160796431.256619,2160796432.25765,2160796433.26236,2160796434.262031,2160796435.261071,2160796436.266722,216
0796437.265632,2160796438.263863,2160796439.261303,2160796440.255373,2160796441.248594,2160796442.254024,2160796443.254865,2160796444.256745,2160796445.261976,2160796446.261826,2160796447.261006,2160796448.261867,2160796449.261278,2160796450.260388,2160796451.259559,2160796452.26563,2160796453.26477,2160796454.26325,2160796455.262151,2160796456.262561,2160796457.261522,2160796458.265762,2160796459.270183,2160796460.270223,2160796461.268304,2160796462.266224,2160796463.264344,2160796464.269475,2160796465.273265,2160796466.271546,2160796467.269036,2160796468.268247,2160796469.266217,2160796470.264357,2160796471.268018,2160796472.273168,2160796473.271779,2160796474.274449,2160796475.27198,2160796476.27047,2160796477.26837,2160796478.266201,2160796479.264291,2160796480.269132,2160796481.266652,2160796482.258672,2160796483.262423,2160796484.262983,2160796485.261934,2160796486.266094,2160796487.264115,2160796488.257875,2160796489.250935,2160796490.256076,2160796491.256606,2160796492.257737,2160796493.262937,2160796494.261728,2160796495.254858,2160796496.260879,2160796497.260829,2160796498.26,2160796499.264551,2160796500.264111,2160796501.256811,2160796502.261112,2160796503.260822,2160796504.261193,2160796505.265833,2160796506.264094,2160796507.262064,2160796508.256234,2160796509.261015,2160796510.261005,2160796511.260486,2160796512.266396,2160796513.270867,2160796514.269337,2160796515.267278,2160796516.266638,2160796517.264978,2160796518.268739,2160796519.267399,2160796520.26682,2160796521.26519,2160796522.263801,2160796523.262451,2160796524.262521,2160796525.261592,2160796526.260712,2160796527.259883,2160796528.260433,2160796529.259844,2160796530.259244,2160796531.258704,2160796532.259455,2160796533.259065,2160796534.258586,2160796535.258176,2160796536.259087,2160796537.258757,2160796538.258537,2160796539.258118,2160796540.259318,2160796541.258999,2160796542.258559,2160796543.2582,2160796544.25908,2160796545.258771,2160796546.258691,2160796547.258312,2160796548.259223,21
60796549.258923,2160796550.263784,2160796551.268704,2160796552.267965,2160796553.271725,2160796554.270165,2160796555.267956,2160796556.272566,2160796557.270717,2160796558.268367,2160796559.266538,2160796560.278748,2160796561.277219,2160796562.279559,2160796563.28207,2160796564.28031,2160796565.27688,2160796566.273351,2160796567.270341,2160796568.274272,2160796569.277382,2160796570.274722,2160796571.271823,2160796572.269603,2160796573.261253,2160796574.253304,2160796575.257904,2160796576.259235,2160796577.258985,2160796578.263876,2160796579.262566,2160796580.256836,2160796581.261617,2160796582.261607,2160796583.261048,2160796584.266978,2160796585.271469,2160796586.269939,2160796587.26831,2160796588.27364,2160796589.271911,2160796590.269561,2160796591.266471,2160796592.265312,2160796593.266452,2160796594.265643,2160796595.264343,2160796596.264364,2160796597.268514,2160796598.267355,2160796599.265675,2160796600.265736,2160796601.264346,2160796602.262967,2160796603.267187,2160796604.267338,2160796605.265808,2160796606.264539,2160796607.263209,2160796608.263219,2160796609.26225,2160796610.26127,2160796611.260401,2160796612.260911,2160796613.265642,2160796614.264982,2160796615.263743,2160796616.264213,2160796617.263103,2160796618.262034,2160796619.261034,2160796620.261645,2160796621.260935,2160796622.265516,2160796623.263936,2160796624.257966,2160796625.262527,2160796626.262337,2160796627.261578,2160796628.267418,2160796629.271559,2160796630.269979,2160796631.2682,2160796632.26756,2160796633.26589,2160796634.271611,2160796635.275521,2160796636.275172,2160796637.272482,2160796638.269873,2160796639.267613,2160796640.266883,2160796641.270614,2160796642.269204,2160796643.267285,2160796644.267605,2160796645.266006,2160796646.264516,2160796647.263507,2160796648.264108,2160796649.262938,2160796650.267319,2160796651.265449,2160796652.259769,2160796653.2527,2160796654.25785,2160796655.258391,2160796656.259861,2160796657.258691,2160796658.252442,2160796659.246292,2160796660.253793,
2160796661.255153,2160796662.255674,2160796663.261264,2160796664.267955,2160796665.267125,2160796666.265565,2160796667.264106,2160796668.264356,2160796669.268437,2160796670.267407,2160796671.265898,2160796672.265758,2160796673.264478,2160796674.263259,2160796675.262179,2160796676.26249,2160796677.26175,2160796678.260961,2160796679.265551,2160796680.266212,2160796681.265022,2160796682.263712,2160796683.262573,2160796684.262863,2160796685.262534,2160796686.261734,2160796687.260994,2160796688.261595,2160796689.261065,2160796690.260486,2160796691.264926,2160796692.265727,2160796693.264637,2160796694.263458,2160796695.262439,2160796696.262749,2160796697.262,2160796698.26162,2160796699.26098,2160796700.264651,2160796701.269522,2160796702.268582,2160796703.267032,2160796704.267153,2160796705.265793,2160796706.264374,2160796707.268464,2160796708.273965,2160796709.277175,2160796710.275086,2160796711.272286,2160796712.270966,2160796713.274117,2160796714.272487,2160796715.270118,2160796716.269228,2160796717.267688,2160796718.266139,2160796719.264619,2160796720.26981,2160796721.2686,2160796722.266801,2160796723.265451,2160796724.265241,2160796725.264052,2160796726.263242,2160796727.262263,2160796728.262603,2160796729.262173,2160796730.261404,2160796731.260724,2160796732.261335,2160796733.260835,2160796734.260406,2160796735.259866,2160796736.261036,2160796737.260617,2160796738.260117,2160796739.264938,2160796740.265748,2160796741.264669,2160796742.26253,2160796743.25579],"weight":[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1
,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1
,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1
,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1
,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],"weightType":"samples","threadCPUDelta":[0,1585,3881,3362,3052,2561,2176,2013,1765,1656,1493,1420,1363,1272,1229,1199,1147,1123,1108,1079,1066,1056,1050,1035,1029,1025,1023,1016,1013,1010,1011,1006,1005,1004,1006,1002,1002,1001,1004,1000,1000,1000,1003,1000,1000,999,1002,999,999,999,1002,999,999,999,1002,999,999,999,1002,999,999,999,930,785,792,819,846,765,600,623,674,726,776,824,854,882,896,911,933,949,952,960,974,974,978,986,985,986,1000,997,996,1002,1003,998,996,997,997,999,1003,998,997,999,998,998,998,1006,999,998,998,1000,999,998,998,1000,999,999,999,1005,1004,998,998,1013,1002,1001,1001,997,995,996,996,1003,997,997,997,1002,998,998,1003,999,997,998,998,1000,998,998,998,1000,999,999,1011,1006,1003,1001,1001,1005,995,995,995,997,997,997,997,999,998,998,1003,1000,998,1010,1004,1004,1001,1001,1001,1008,1001,1000,999,999,999,994,1005,1001,999,998,999,995,994,994,995,1002,997,996,1008,1004,1002,1006,1000,1001,994,994,994,996,996,997,997,1010,1004,1002,1000,997,995,996,996,998,997,997,998,999,1010,1004
,1003,1002,1001,996,1006,1003,1001,999,994,995,995,996,996,1003,997,997,997,1005,998,997,997,999,998,998,998,1000,999,999,999,1000,999,999,1004,1000,998,998,999,1000,999,1010,1005,1005,1008,1002,1001,1001,994,994,995,997,996,1008,1003,1003,1000,1000,995,1008,1001,1000,1000,1000,994,994,995,996,996,1008,1003,1003,1006,1001,1000,1000,994,993,1006,1002,1000,1000,1005,1000,993,993,994,996,996,996,997,1010,1003,1002,1007,1008,999,1005,999,1000,997,997,993,994,994,995,995,997,997,997,1009,1005,1002,1007,1000,996,1006,1001,1000,1006,1005,997,992,993,993,994,995,997,996,1009,1003,1003,1007,1000,994,1007,1000,999,1005,1006,998,996,997,993,993,1005,1000,1001,998,993,994,996,996,996,1008,1004,1002,1006,1000,1001,1006,1000,999,1000,997,993,993,1007,1001,1000,998,995,1006,999,994,995,995,996,995,1009,1003,1001,1000,1001,995,994,995,997,996,1009,1003,1006,1000,995,994,1008,1002,1000,999,996,994,1007,1001,1002,1000,999,999,995,994,995,995,997,997,997,997,999,998,1010,1004,1004,1007,1007,999,995,994,994,995,1009,1002,1001,1000,996,994,995,996,998,1008,1003,1002,1002,995,995,1007,1003,1001,1000,999,996,994,995,996,997,1009,1003,1002,1008,1001,1000,998,995,994,1006,1001,1001,999,994,994,996,1007,1002,1001,1001,1000,994,994,996,996,1008,1003,1003,1006,999,994,995,995,995,996,1015,1003,1001,1006,1000,993,993,994,996,996,996,997,1010,1003,1002,1007,1004,1005,998,993,994,994,995,995,997,1008,1003,1002,1008,999,994,994,996,995,1008,1002,1006,1001,1000,1000,1001,998,993,994,1001,995,995,996,997,997,1009,1004,1004,1007,1000,994,996,995,995,1008,1004,1001,1001,1001,1002,1000,999,999,995,994,995,995,1000,997,997,1009,1005,1002,1000,995,996,1007,1002,1001,1003,1005,999,993,1000,1003,999,999,1001,1001,1000,999,1001,999,999,999,1006,998,992,992,1006,1000,999,1005,1001,999,999,999,1000,997,998,993,1006,1000,999,1004,999,992,1004,999,1000,999,1004,998,994,1005,999,999,1005,1004,1003,996,992,992,992,993,996,1007,1002,1001,1002,1005,1005,1005,1005,996,991,1003,1000,998,997,998,994,993,994,995,1005,9
96,995,1008,1004,1001,1001,999,1001,994,1006,1001,1001,999,994,994,1010,1001,1000,998,995,994,995,1005,1003,1000,995,995,996,1008,1002,1001,1001,1000,994,1006,1002,1000,1005,998,995,1005,1000,999,999,993,993,994,1008,1002,1001,999,1001,994,994,995,996,996,997,997,1010,1004,1002,1001,1005,995,994,1007,1003,1001,1006,999,995,1005,1000,999,999,998,993,1005,1004,1000,998,998,994,994,994,995,997,1008,1003,1002,1001,995,994,1007,1003,1001,999,994,996,995,996,996,1009,1003,1002,1001,1008,999,993,1005,1001,1000,998,993,995,1006,1001,1000,1000,994,994,1006,1002,1000,999,999,995,994,1007,1001,1002,999,994,994,1008,1001,1000,1005,1000,993,1005,1000,1000,1000,999,1000,1000,1000,999,1000,1005,1000,997,1004,999,992,1004,999,1000,997,998,1004,1001,999,998,1004,1005,998,997,998,999,998,1003,998,999,999,998,998,1005,999,997,1003,999,997,998,998,1000,1004,999,998,999,998,998,998,1005,998,998,998,1000,998,1004,999,999,998,998,998,1005,1004,1003,997,999,997,997,1003,999,997,997,998,1000,998,998,999,1005,997,992,1004,1001,999,999,997,994,993,1006,1001,1001,1005,998,993,1006,1000,999,1004,1000,998,1003,998,999,998,998,998,1000,999,999,999,1005,999,998,998,1000,999,1004,1004,999,998,997,998,1004,1003,1002,997,998,997,997,1002,999,998,998,998,999,998,998,999,1000,999,999,999,1000,999,998,993,1000,1003,1001,1000,1004,1005,1004,997,1005,998,997,1002,999,997,997,998,999,998,998,997,994,1005,1000,999,1000,999,999,999,1000,999,1008,999,1000,998,998,998,1005,1004,998,997,999,998,998,997,997,993,1005,1000,1001,998,993,993,1007,1001,1000,999,995,994,995,995,997,997,1009,1003,1004,1000,1000,995,1008,1001,1000,1005,1000,993,1005,1000,1000,1004,998,992,1006,999,999,999,999,993,993,1006,1002,1000,1005,999,997,1005,999,999,1005,998,998,998,1000,998,998,1004,1005,998,997,997,999,1003,997,992,1005,999,998,1004,1005,1003,997,997,998,1003,996,991,993,1005,1000,999,1006,998,992,1004,1001,999,1004,999,999,997,992,993,1006,1000,1000,1005,999,993,993,1005,1002,1000,998,994,995,1006,1001,1000,1007,998,993,1005,
1001,999,1004,998,994,993,1005,1000,1001,1005,998,993,1006,1000,999,997,999,993,1005,1000,1001,998,993,993,1007,1001,1000,1000,1000,999,994,1005,1002,1000,998,999,995,1006,1000,1000,1000,993,993,1006,1002,1000,1000,1000,1001,1000,998,993,995,1007,1001,1000,1007,1005,999,997,993,993,1005,1000,1001,1004,999,999,1000,999,999,999,999,993,993,1006,1002,1000,1005,1005,1000,998,998,998,1005,1004,998,997,999,998,998,1003,998,992,1004,999,1000,1004,1004,998,999,998,998,1003,998,992,1004,999,1000,998,1013,1010,1003,1000,1000,993,990,990,991,1004,1001,999,999,999,1000,998,998,993,995,1006,1001,1000,1000,999,994,1006,1002,1000,998,999,995,1005,1000,1000,1006,1004,998,998,999,998,998,1004,998,992,1004,999,1000,1004,1004,998,999,998,998,1003,1005,997,997,997,999,998,1003,997,993,1004,999,998,1005,997,992,1004,1000,999,997,993,994,1006,1001,1000,1006,998,998,993,994,994,995,1007,1003,1001,1006,1000,1001,998,993,993,1007,1001,1000,1005,1006,999,998,997,994,993,1005,1000,1001,998,999,994,1007,1000,999,1005,999,993,1004,999,1000,998,998,993,995,1006,1001,1000,1005,1000,997,1002,999,997,995,991,993,1005,1000,999,1006,999,998,998,1000,999,999,998,994,993,1006,1001,1001,1005,1005,999,1000,998,998,999,1000,999,999,999,1000,999,999,999,1000,1004,999,998,999,993,993,1005,1002,1000,1005,1000,1000,999,999,999,1000,999,999,999,1001,999,999,1004,1006,1004,998,997,998,998,998,998,1000,998,998,1004,999,992,992,1005,1001,1000,1005,999,1000,997,993,993,1006,1001,998,1004,999,992,1004,999,1000,998,1004,997,994,993,993,994,1008,1002,1001,1000,1002,1000,998,994,995,1006,1001,1000,1007,1000,999,999,1000,999,999,999,1000,999,1004,999,1000,998,998,999,1000,999,999,1004,999,992,1004,999,1000,999,1004,999,1003,998,998,998,1005,998,998,998,999,998,1004,1004,999,998,997,998,999,1004,997,992,1005,999,998,1004,1000,998,997,992,994,1005,1000,1000,1006,999,998,998,1000,999,999,998,999,993,1005,1000,1007,998,998,998,1006,999,998,997,994,993,1005,1001,1001,1000,999,999,1001,999,999,999,1001,999,999,998,995,993,10
06,1001,1001,1005,1000,999,1006,999,998,998,1000,999,999,1004,1000,998,998,998,1000,1004,999,998,999,998,998,1004,1003,998,998,998,999,998,1004,1004,999,997,997,998,999,998,998,999,1000,999,999,999,1000,998,993,993,995,1007,1002,1001,1002,1000,1000,999,995,994,1006,1001,1002,1000,1000,1000,1006,998,993,993,1007,1001,1000,998,995,994,1006,1001,1002,1005,999,999,1006,999,998,1003,999,992,1004,999,1000,999,999,999,1001,1004,998,992,1006,999,998,999,1000,999,1004,1004,999,998,998,998,999,999,998,999,1005,999,998,997,993,993,1005,1000,1001,1005,1005,1004,1000,997,997,1003,1004,997,997,997,999,998,1003,1003,999,997,997,997,1004,998,997,1003,999,997,1003,998,999,998,998,998,999,998,998,999,999,998,993,1005,1001,999,1004,998,994,1005,1000,999,1005,1004,997,991,1005,999,998,997,999,993,1005,1000,1000,1004,998,992,994,994,994,1007,1003,1001,1001,1005,1001,999,998,993,995,1006,1001,1000,1006,1005,999,998,999,998,997,992,994,1006,1001,1000,1001,1005,999,999,1000,999,999,999,1000,999,998,993,994,1006,1001,1000,1006,999,999,999,1000,999,999,999,1000,999,1004,999,1000,999,998,999,1005,999,998,998,999,998,997,993,994,994,1006,1002,1002,1001,1000,1000,1006,1000,999,1004,1005,998,998,998,999,998,998,999,1000,999,999,999,1006,1004,1003,998,998,997,1003,1003,999,997,997,997,1004,998,997,1003,1004,997,996,997,999,998,999,997,1000,999,998,999,1006,1004,1003,998,998,997,1002,998,1000,1003,996,996,993,1004,999,998,999,998,993,1005,1001,999,1004,1004,1000,998,998,998,1005,998,998,998,999,998,998,997,1002,999,999,999,1000,999,999,999,1005,998,992,1004,1001,999,999,999,1000,999,1004,998,994,1004,999,999,1000,999,999,1004,1000,998,998,998,1000,999,999,999,1000,1004,1004,998,999,998,998,1003,999,998,998,998,999,1004,1004,1003,999,997,997,1002,999,997,997,998,999,998,997,992,994,1006,1001,1000,1001,999,999,1005,1001,999,999,999,1000,999,1004,998,994,1004,999,999,999,993,993,1005,1002,1000,1005,1005,1000,998,998,998,1005,998,998,998,999,998,998,999,1000,999,999,999,1000,1004,998,993,1006,999,999,
1004,1005,998,997,998,999,998,998,998,1000,999,999,999,1005,1004,998,998,999,998,1004,998,999,1003,998,998,999,998,998,998,1005,997,992,1004,1000,998,1004,997,999,1004,998,998,1005,997,991,992,1006,1000,999,1004,1006,997,1003,998,999,1003,998,997,999,998,998,998,1000,999,999,999,1000,999,1004,998,994,1004,999,999,1005,1004,998,998,999,998,1003,998,999,998,998,998,1000,1004,997,992,1005,999,998,1004,1005,998,997,997,999,1003,997,992,1005,999,998,1004,998,992,992,1005,1001,1000,1000,999,1001,1005,999,999,999,993,993,1005,1002,1000,1000,1000,1001,1005,998,993,994,1005,1001,1000,1001,999,999,999,1001,999,998,993,995,1006,1001,1000,1010,998,992,992,1006,1000,999,1005,1001,999,998,999,1000,1004,999,998,1000,998,998,999,1000,999,999,999,1000,999,1004,1000,1001,1002,997,992,1005,999,998,997,994,993,1005,1001,1001,1005,1005,998,994,1004,999,999,1005,1003,998,1003,999,997,997,997,999,998,998,998,1000,999,999,1004,1000,998,998,998,1000,999,999,999,1006,999,998,998,1001,998,998,1004,1005,997,996,991,1003,998,998,998,1000,1004,997,992,1005,999,998,998,1003,999,998,998,1000,1004,999,998,1000,998,998,998,1005,999,998,998,1000,998,1004,999,999,998,998,998,1005,1004,997,1003,999,997,997,1003,1004,997,996,996,993,1004,999,999,1005,998,998,997,993,993,1005,1000,1001,1005,999,999,1000,998,993,993,1007,1001,1000,999,995,994,1006,1001,1005,1005,998,993,1006,999,999,999,1000,999,999,999,1000,999,998,993,995,1006,1001,1000,1001,1000,999,1005,1001,999,999,999,1000,999,1004,999,1000,999,999,998,1000,999,999,1004,1000,998,998,998,1000,1004,999,998,1000,999,999,999,1005,1004,998,998,999,998,1003,998,999,998,998,998,1000,999,999,999,1000,999,999,999,1000,999,1004,998,994,1004,999,999,998,993,991,1004,1000,998,1004,999,999,1003,998,998,999,997,992,992,1006,1000,999,1005,1000,999,996,992,993,1005,1000,999,1006,999,998,998,999,993,993,1005,1002,1000,1005,1005,1001,998,998,998,1005,998,998,998,1003,998,1003,1003,1004,997,997,997,998,1002,998,997,998,992,992,1005,1001,999,1000,999,1001,1004,999,999,
1000,998,998,993,1006,1000,999,1004,999,992,1004,999,1000,1004,999,998,1000,998,998,997,999,993,993,1005,1002,1000,1005,999,994,1005,1000,999,999,993,993,1005,1002,1000,1005,1000,1003,999,997,992,994,1006,1001,1000,1001,999,999,1005,1000,999,998,999,1000,999,999,999,1006,999,998,998,1000,999,999,999,1000,999,1004,999,1000,999,998,998,1005,999,998,998,999,998,998,1004,1006,998,998,998,999,1003,998,998,999,998,998,1003,1000,998,998,998,1000,1004,997,992,994,1005,1000,1000,1006,999,998,1004,1000,998,998,998,1000,1004,997,992,1005,999,998,998,1005,1004,998,996,993,992,1005,1000,1000,1005,999,999,1000,999,999,998,994,993,1005,1001,1001,1000,1005,999,994,1005,1000,999,1006,999,998,998,1000,999,1004,999,1000,998,999,998,1000,999,999,999,1006,999,998,998,1000,998,1004,999,1000,998,998,998,1005,1004,998,998,999,998,998,1003,998,992,992,993,995,1007,1002,1001,1002,1000,1000,1005,1009,999,1003,997,992,992,1004,1000,1000,1005,1004,998,999,998,998,998,999,999,999,999,1000,998,993,993,1007,1001,1000,1000,1006,1005,999,998,1002,998,998,998,999,1003,998,998,1005,998,997,998,999,998,998,998,1000,999,999,999,1000,1004,999,998,1000,998,999,1004,1000,998,998,998,1000,1004,999,998,999,999,998,998,1000,999,999,999,1000,999,1004,1004,1005,998,997,997,1004,1003,1002,997,998,997,997,1003,999,997,997,998,999,997,992,993,1006,1000,1000,1005,1001,999,998,993,994,994,1006,1001,1002,1006,1005,999,1005,998,998,997,993,993,1005,1000,1001,1005,998,993,994,1005,1001,1000,1006,1005,997,1003,999,997,998,998,999,999,998,999,1005,999,998,998,999,999,999,999,1000,999,1004,999,1000,1004,998,998,1000,998,998,1003,1000,998,998,998,1000,999,1004,999,1000,999,998,999,1005,1004,1002,998,998,997,997,997,999,1003,998,997,1000,998,998,998,1000,998,999,1004,1000,998,1003,999,999,1003,998,997,1000,998,998,1003,1000,998,998,998,1000,998,1004,999,999,999,998,998,1000,999,998,999,1000,999,1004,999,1000,999,998,998,1000,999,999,999,1000,999,999,1004,1005,998,998,998,999,1003,1004,998,999,997,997,1003,999,998,998,1003,9
98,992,1003,999,1000,1004,999,998,1000,998,998,1004,1000,998,998,998,1000,998,999,999,1000,999,999,999,1001,999,999,999,1000,999,999,999,1001,999,999,999,1006,1004,998,998,1000,998,998,1004,1000,998,998,998,999,1004,1004,1003,999,997,997,1002,999,997,997,1003,999,997,998,998,999,997,998,993,1006,1000,999,1004,999,998,1004,999,999,1003,1003,998,998,997,997,998,999,998,998,999,1000,999,998,998,994,1005,1000,999,1006,999,998,998,1000,999,999,1004,1000,998,998,999,1000,999,999,999,1001,999,999,1004,1000,998,998,998,1000,999,1004,1004,1000,998,997,998,1005,998,998,998,999,998,1004,1004,999,996,991,992,994,1006,1001,1000,1002,1000,999,1005,1001,999,1004,999,999,998,998,998,1005,997,992,1004,1000,999,999,1004,999,992,1004,999,1000,1004,999,998,1000,998,998,1004,1000,998,998,998,1000,999,999,999,1000,1004,999,998,1003,998,998,998,1000,998,1004,999,999,998,998,998,1000,999,999,999,1005,999,998,1004,1000,998,998,998,1000,1004,1004,998,999,997,998,1003,1004,997,997,997,999,998,1004,998,999,998,998,998,1000,999,999,998,994,993,1005,1001,1002,1005,1005,1004,998,1003,997,997,997,992,992,1004,1001,999,1005,1005,1000,998,998,998,999,998,999,999,1000,999,999,999,1000,999,1004,1004,1000,998,998,998,1000,998,998,999,1000,999,999,999,1000,999,1004,999,1000,999,998,998,1005,999,998,998,1000,998,1004,1004,999,998,997,998,1005,1003,997,997,1005,998,997,1003,999,997,997,997,999,998,998,998,999,993,993,1005,1002,1000,1000,1005,1006,999,1003,998,999,998,998,998,1004,998,998,998,999,998,999,999,1000,1004,1004,1003,999,997,997,997,1004,1003,997,997,998,998,998,998,999,1003,998,998,1005,998,997,998,999,998,998,998,1000,999,1004,999,999,999,998,998,999,998,993,1005,1001,999,1004,1004,1000,998,998,998,999,997,993,993,1006,1001,1000,1005,1001,999,999,999,1000,998,993,993,995,1007,1002,1001,1007,1005,1004,999,999,998,1003,998,999,998,998,998,1004,998,998,998,999,998,998,1004,1004,997,1001,996,997,1001,996,996,998,997,997,1003,1004,996,1002,997,998,997,997,998,999,997,998,993,1006,1000,999,998,994,9
93,1006,1001,1001,1005,1005,999,1000,998,998,998,999,998,998,1005,1000,999,1004,998,993,1004,999,999,1005,998,992,1004,1000,999,999,998,999,993,1005,1000,1001,1004,999,999,1005,998,998,997,994,993,1005,1000,1001,1005,999,999,1000,999,999,999,1006,999,998,998,1000,998,1004,1004,1000,998,997,998,1005,1003,998,997,999,997,998,1003,1005,998,1002,997,998,997,997,998,1004,997,992,1003,1000,998,1004,998,993,993,1005,1000,1001,1005,998,993,1006,999,999,1004,999,992,1004,999,1000,1004,998,997,994,1004,999,999,1005,1004,998,997,999,998,1003,998,999,998,998,998,1000,999,999,999,1000,999,999,999,1000,999,999,999,1000,999,999,999,1001,999,999,999,1000,999,999,999,1000,999,1004,1004,999,1003,998,997,1004,998,997,998,998,998,1002,1002,998,996,996,996,1003,1003,997,997,997,991,992,1004,1001,999,1004,998,994,1004,999,999,1005,1004,998,998,1005,998,997,996,998,1001,999,998,1000,1004,998,998,1000,998,998,1004,1000,998,998,998,1000,999,999,999,1000,1004,999,998,1000,998,998,999,1000,999,1004,998,994,1004,999,999,1005,1004,998,998,999,998,997,1003,999,997,997,997,999,1003,998,998,1000,998,998,998,1000,998,1004,998,994,992,1005,1000,1001,998,993,993,1007,1001,1000,1005,1006,999,998,998,1000,1004,998,998,999,998,998,998,1000,999,999,1004,1000,998,998,998,1000,999,999,999,1000,999,999,1004,1000,998,998,998,1000,999,999,999,1003,1004,999,998,1000,998,998,1004,1005,1003,997,997,998,1003,998,997,999,998,998,998,1005,998,998,998,999,998,999,999,1000,999,999,999,1000,999,999,999,1001,999,999,1004,1000,998,997,993]},"stackTable":{"length":2599,"prefix":[null,null,null,null,3,null,5,3,null,8,null,10,11,null,13,null,15,10,3,null,19,8,null,null,null,24,null,10,24,28,3,null,3,null,19,34,null,null,37,24,null,null,3,null,43,15,null,46,28,null,null,19,51,null,null,8,null,null,57,28,15,null,28,null,63,28,10,8,null,null,69,null,71,43,69,19,null,null,77,77,3,null,15,28,15,28,28,null,null,88,37,null,19,92,null,null,28,28,null,98,null,null,101,15,null,104,8,null,24,28,15,null,69,10,113,null,77,69,null,118,6
3,28,15,122,71,null,125,3,28,null,null,130,28,null,19,null,8,3,null,138,28,null,141,null,null,8,10,146,71,104,8,28,null,null,153,69,10,63,null,null,null,160,46,15,8,null,165,3,138,null,169,null,88,null,19,174,null,176,null,178,28,null,null,15,null,null,10,null,null,null,189,190,77,null,104,null,195,28,null,198,10,null,69,69,15,204,8,10,null,190,null,210,null,null,null,19,8,28,10,218,69,null,77,null,8,71,15,37,10,160,null,37,null,15,28,null,10,28,null,178,8,null,241,242,null,15,125,43,10,null,8,null,71,15,253,null,10,256,null,null,8,28,198,15,88,69,19,266,24,null,77,8,28,null,28,10,69,77,190,8,null,280,15,69,37,10,285,28,null,28,null,15,69,10,293,null,null,8,null,null,null,15,301,null,303,null,10,28,null,null,309,37,null,312,null,69,8,3,null,null,319,28,null,15,323,77,8,null,327,null,329,null,null,8,null,null,8,46,19,338,null,3,null,8,8,8,null,null,347,8,19,null,37,3,353,null,69,28,null,358,77,null,361,19,363,69,8,3,367,28,160,null,371,71,null,37,15,104,null,10,null,69,null,382,15,null,19,386,null,3,null,390,28,null,46,null,15,160,88,71,3,400,null,null,403,null,405,null,19,28,null,410,10,412,null,null,null,8,28,null,419,null,19,28,19,424,37,null,null,77,28,28,null,15,433,null,3,71,24,69,71,15,37,5,10,190,28,382,null,390,19,382,8,3,453,null,8,null,457,71,77,77,3,462,71,77,28,466,null,468,null,null,null,472,390,19,3,476,null,37,403,382,null,15,483,10,485,null,77,419,327,null,15,null,493,10,63,28,3,null,71,28,15,502,69,71,10,null,null,508,28,null,511,19,513,null,515,63,3,518,28,null,37,69,15,null,10,null,28,3,529,8,8,null,312,77,338,37,3,null,24,null,69,15,543,null,28,10,547,null,69,8,77,329,69,69,null,null,3,24,24,null,63,43,null,3,138,null,null,69,15,176,77,10,165,null,77,24,null,null,19,null,3,361,null,null,28,165,141,15,589,28,165,null,593,19,77,405,10,null,63,null,46,15,null,10,null,3,607,88,null,37,37,null,472,319,28,19,71,3,71,null,621,28,null,null,null,null,69,null,629,19,null,3,8,361,8,8,77,15,639,null,null,71,3,null,71,null,138,null,649,24,15,10,null,null,160,
511,null,8,null,null,3,77,71,null,null,null,88,null,669,410,19,104,69,3,null,null,null,null,210,466,69,71,10,684,null,189,69,null,null,690,24,468,511,15,403,8,10,698,null,8,24,104,null,15,382,8,10,null,138,null,8,37,19,24,28,10,717,71,71,468,511,null,723,null,19,71,77,10,729,null,69,8,null,8,69,15,null,77,10,160,190,null,743,403,28,null,28,19,37,77,3,752,null,null,null,8,10,null,null,null,null,null,8,19,null,766,null,3,138,28,88,null,8,15,null,10,8,null,69,null,405,15,28,null,3,390,77,210,28,28,28,null,8,null,795,303,13,15,77,19,null,37,19,28,77,10,null,808,515,37,8,71,138,28,15,515,424,null,null,3,511,null,null,138,37,15,210,null,19,null,10,28,3,24,null,null,28,null,165,null,125,511,15,844,46,null,10,3,28,71,77,15,19,null,3,63,null,28,160,382,15,862,8,10,37,462,null,868,null,870,19,28,10,874,493,null,3,null,3,880,8,8,null,15,419,511,10,69,390,3,891,null,null,28,69,null,897,77,37,19,null,547,169,327,3,906,null,69,101,15,null,77,10,28,8,19,10,28,3,920,13,511,390,null,15,926,10,28,303,190,null,37,null,19,null,3,937,24,28,null,15,942,null,77,28,190,71,511,63,37,null,77,15,954,955,160,10,37,3,138,511,28,493,null,390,8,10,null,529,null,280,629,77,69,15,28,19,10,979,980,null,null,28,null,28,null,511,null,113,null,69,null,null,104,null,19,10,998,405,468,null,1002,210,410,null,3,15,160,19,1010,null,3,null,327,71,null,8,15,28,28,19,1022,8,3,1025,280,null,210,null,15,403,77,10,1034,280,468,309,621,69,8,19,160,3,69,43,null,77,15,null,3,28,160,125,null,1055,null,15,138,28,19,1061,null,165,329,190,8,160,28,15,null,null,null,3,28,28,null,null,19,8,63,329,37,69,null,null,593,280,10,3,69,690,160,null,null,19,77,10,1098,null,241,1101,190,43,515,69,15,null,19,1109,419,3,71,28,160,null,null,10,10,1119,null,1055,8,69,382,15,1126,138,10,28,190,71,69,null,null,null,null,3,210,468,null,69,15,165,10,null,3,null,198,null,1150,69,15,468,104,10,77,3,1158,118,71,28,15,69,10,28,null,468,null,1169,77,69,743,null,1174,71,null,19,382,null,3,37,28,null,null,null,19,null,1188,361,3,null,138,160,1188
,329,69,19,8,null,10,null,null,43,null,808,241,1207,15,1209,null,1211,10,1213,329,null,null,69,71,37,511,null,329,15,null,10,null,null,1228,190,329,null,8,515,37,15,125,19,1238,3,28,null,null,8,null,15,280,15,1248,77,10,null,10,1253,77,28,null,77,19,null,10,1261,69,8,null,null,1266,null,null,1269,43,19,382,280,3,43,8,28,1228,null,null,15,1282,77,868,309,210,8,null,8,15,19,1292,null,3,649,24,160,null,1299,28,15,511,null,8,3,138,null,125,null,63,19,28,28,null,null,null,43,69,28,null,1321,15,null,19,3,null,69,15,null,28,19,1332,8,3,24,515,15,24,77,515,24,null,28,null,null,382,8,19,24,10,null,null,8,8,515,15,null,null,null,3,null,8,382,null,28,null,null,8,15,19,1371,8,71,1055,8,null,15,null,8,10,3,1382,98,88,69,468,null,1388,37,19,515,37,28,165,57,77,1266,15,37,10,920,303,511,1169,77,15,19,1408,3,37,28,511,15,28,1292,104,3,71,743,8,null,null,15,1424,10,77,3,28,138,13,null,null,1433,71,19,28,28,10,1439,71,null,280,null,3,160,24,null,37,null,19,165,10,1453,808,468,null,15,37,null,3,28,28,71,77,null,1466,19,382,3,1470,361,null,71,15,511,null,19,403,3,1480,125,8,160,280,15,1486,329,null,101,28,104,43,10,1494,511,null,190,69,71,19,24,3,71,69,104,null,null,1508,210,null,19,28,293,3,1515,1101,169,198,null,null,1521,19,10,621,3,24,891,77,10,null,28,3,1533,28,null,511,37,null,1539,null,1541,19,319,28,729,28,190,37,104,77,null,77,19,28,28,10,1557,361,190,null,null,1562,160,15,1169,10,690,3,1569,160,125,176,15,19,1575,3,1541,160,null,1150,null,null,1583,69,19,468,null,10,1589,1590,69,28,null,511,null,15,1597,37,10,28,8,24,104,null,null,19,8,390,null,1610,null,null,77,19,28,28,10,null,190,69,8,28,165,37,15,210,10,410,28,3,24,511,71,15,null,24,19,1638,8,303,280,15,null,1253,1645,138,28,382,19,19,69,3,69,280,1515,null,null,null,null,69,19,28,69,10,null,3,24,null,null,null,1671,69,19,24,621,729,null,190,null,1680,69,null,138,69,8,15,28,160,19,8,3,160,303,160,null,null,1697,null,1699,19,515,8,1207,190,28,15,386,3,71,327,303,null,null,null,69,19,1541,28,10,1720,69,69,329,329,19,null,nul
l,null,1729,19,28,10,1733,null,null,303,329,null,19,10,null,null,null,15,null,69,10,null,1749,8,null,3,43,69,15,8,10,null,3,1760,null,329,169,382,88,null,1767,null,15,1770,37,10,3,160,468,303,null,37,19,141,37,69,69,69,24,3,515,28,125,468,125,104,19,125,null,1796,3,8,63,361,312,8,19,8,8,10,3,1808,1809,8,71,405,24,19,null,1729,null,190,69,8,15,8,10,8,69,15,329,null,10,104,null,null,8,1541,15,null,329,10,3,24,77,null,1843,15,327,104,10,63,37,null,24,null,403,511,3,71,46,303,null,null,37,19,10,178,8,8,515,69,15,19,1871,69,3,null,160,null,null,1466,1575,515,71,8,160,15,1885,69,69,10,3,1890,8,37,46,515,15,28,24,19,1899,3,1901,43,138,null,382,15,3,71,null,1749,null,210,43,77,15,19,24,190,15,1920,10,190,69,8,69,160,15,1928,69,10,3,15,null,19,1935,1936,390,169,null,28,327,8,null,10,null,3,1947,null,null,390,1101,280,101,10,28,190,1055,390,15,28,329,null,3,69,104,null,1967,15,1969,77,null,1901,28,null,null,1976,8,null,19,28,10,1982,165,null,null,69,138,null,null,280,37,15,1188,390,71,3,71,511,null,515,3,8,null,2004,390,19,null,3,468,null,2011,24,15,1188,511,10,160,3,71,null,null,28,15,1638,null,3,138,77,null,77,19,null,8,3,2035,69,null,19,28,10,2041,null,303,189,24,808,37,10,null,280,57,8,138,15,468,3,138,28,null,null,2061,8,19,327,3,null,69,null,77,382,null,15,69,null,189,63,77,37,160,15,2081,8,69,10,null,130,37,10,71,3,2091,null,null,24,15,null,10,null,null,28,327,null,null,10,2105,77,329,69,19,null,165,3,69,241,8,10,410,190,null,8,15,104,468,24,160,10,null,165,28,241,null,8,null,null,2135,303,868,210,98,104,null,2142,15,88,10,280,3,37,24,43,null,327,null,2154,15,28,10,null,3,37,1266,28,null,28,null,1126,63,160,null,88,1209,10,71,3,138,71,371,160,766,19,3,71,null,28,15,138,3,468,511,null,19,28,8,10,24,71,null,15,8,19,8,649,280,88,511,null,795,28,19,3,null,null,15,null,10,28,28,3,null,null,15,null,10,77,1890,723,280,8,null,28,null,15,null,2234,10,28,382,null,null,null,8,19,2105,190,8,8,null,2248,77,15,10,190,1169,null,8,15,280,19,2259,3,28,null,69,15,37,19,10,2268,69,309,18
43,77,28,15,2275,10,null,3,327,null,null,null,2283,390,null,19,null,1253,28,28,312,null,926,10,2295,null,null,493,19,8,77,24,28,69,15,null,3,1843,303,null,2311,19,10,327,null,77,69,165,8,10,303,400,null,null,2325,24,null,10,28,8,null,2332,19,null,3,868,8,15,19,2340,3,327,160,null,723,69,10,null,511,37,77,69,19,28,190,69,69,15,28,160,34,3,77,null,2135,954,8,10,1228,309,71,43,null,160,15,null,10,28,null,2380,15,511,309,69,null,165,323,10,null,3,24,125,511,null,null,88,46,88,19,2400,69,10,2403,69,71,28,69,37,15,125,160,null,null,3,390,null,15,160,71,3,28,71,303,280,null,null,2427,77,77,19,3,515,511,null,37,390,19,28,null,3,null,468,null,2444,69,15,19,8,3,2450,null,2452,null,null,1935,2456,8,453,327,8,1248,8,160,327,null,71,19,null,3,71,null,2472,24,null,19,361,28,77,8,null,19,null,10,2484,808,3,2487,2488,null,77,15,null,10,2494,null,null,8,15,null,2500,3,null,null,15,null,19,2507,1034,null,190,null,28,null,88,15,327,28,468,37,15,null,589,10,63,15,2507,null,3,210,null,null,138,19,104,160,10,2537,361,15,2540,43,10,8,null,3,28,160,69,15,8,19,468,null,10,390,403,19,1494,329,160,null,63,15,2259,3,null,8,null,null,28,19,2572,3,43,null,88,690,10,null,null,329,null,69,null,2585,69,69,19,24,808,312,88,1486,8,10,null,190],"frame":[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,52,147,148,149,150,151,152,153,154,155,156,157,158,159,160,119,161,162,163,164,165,166,167,168,169,170,171,172,173,174,119,175,176,177,178,179,180,181,182,183,184,185,186,28,187,188,189,190,191,192,193,194,168,195,196,197,198,113,35,199,200,201,132,202,203,204,205,206,207,208,209,210
,173,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,172,173,244,245,173,246,247,248,249,99,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,12,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,14,306,307,308,309,310,311,312,258,313,314,315,316,317,318,319,320,321,322,12,323,324,325,326,327,328,329,330,331,332,333,334,335,280,12,336,337,338,339,340,341,342,343,113,35,344,345,322,12,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,288,371,372,373,374,375,376,35,377,378,379,380,381,382,383,384,385,153,34,35,386,387,388,389,390,391,119,392,393,394,146,395,396,397,398,399,400,401,402,403,52,404,405,406,407,408,409,410,411,412,269,413,414,415,416,417,418,419,420,172,173,421,422,423,381,424,425,426,34,35,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,6,443,444,51,52,363,364,445,446,349,447,448,449,450,99,451,452,453,454,455,456,457,458,123,459,460,461,462,463,464,465,466,467,468,35,469,470,471,472,473,474,475,476,477,478,479,480,481,482,210,173,483,484,485,486,487,488,489,490,491,492,493,494,146,52,495,496,92,93,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,14,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,99,210,173,535,536,537,168,538,539,540,324,541,542,543,349,544,545,546,547,548,93,549,550,551,552,553,422,321,554,555,556,557,558,559,560,561,562,563,564,565,566,567,6,568,569,570,571,572,573,574,575,245,173,576,577,578,579,580,581,582,583,584,99,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,6,614,331,615,616,617,618,619,620,621,622,119,623,624,625,626,627,628,468,35,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,458,123,646,647,648,649,650,349,651,65
2,653,654,122,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,239,690,691,692,693,591,694,695,696,697,698,699,700,701,702,703,704,203,705,706,707,708,709,710,168,711,712,713,714,715,716,717,718,321,719,720,721,722,723,724,725,726,727,728,729,314,730,731,732,733,734,735,736,52,737,738,739,740,741,742,743,744,745,99,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,520,766,767,768,769,770,771,772,773,774,775,776,777,376,35,778,779,780,114,781,321,782,783,784,785,786,52,142,787,788,789,11,12,790,791,792,793,794,795,796,797,798,799,12,800,801,802,803,804,379,805,806,807,808,364,540,689,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,349,825,826,827,322,12,828,829,830,831,832,833,834,835,836,245,173,837,838,839,616,824,840,841,842,843,844,845,846,847,848,849,809,233,234,850,851,852,853,854,855,856,813,857,858,859,860,861,254,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,35,881,882,883,884,885,886,887,172,173,888,889,890,891,892,669,893,894,700,895,896,678,897,898,899,900,901,902,903,904,905,906,376,35,907,122,123,908,909,540,910,911,99,912,913,440,914,915,916,669,917,918,919,920,921,922,777,923,924,925,926,927,928,929,930,931,932,933,934,935,936,122,123,937,938,939,177,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,203,960,961,962,963,349,964,965,966,967,968,969,473,970,871,99,140,971,972,973,974,975,439,678,976,977,978,187,979,980,981,579,548,93,982,47,983,984,985,786,395,986,987,988,554,989,990,991,992,993,994,995,888,649,996,997,998,863,999,1000,1001,1002,888,1003,1004,1005,1006,1007,985,1008,1009,253,173,321,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,912,1024,1025,769,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1010,1036,1037,1038,1039,1040,844,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,677,440,1054,14,1055,1056,105
7,1058,1059,1060,1061,1062,1063,1064,736,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,290,1075,1076,1077,548,93,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,810,1092,1093,1094,1095,1096,1097,253,173,192,1098,1099,1100,99,1101,1102,803,1103,1104,148,1105,1106,1107,1108,1109,335,1110,1111,1112,440,1113,794,1114,627,1115,1116,1117,1118,786,52,1119,1120,381,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,170,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,349,1145,1146,1147,1148,1149,1150,1151,1152,1153,458,123,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,579,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,810,1191,1192,14,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,14,1206,1207,1072,1208,1209,1210,1211,1212,888,1213,464,1214,309,1215,1216,1217,1218,1219,280,12,1220,1221,1222,1223,1224,1225,1226,443,1227,1228,1217,1229,1230,1231,439,1232,1233,1234,1235,1236,1237,119,1238,1239,379,1240,1241,1242,1243,1244,678,1245,1246,1247,1248,183,1249,1250,1251,1252,1253,1254,1255,376,35,523,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,113,35,1270,1271,1272,1273,1274,1275,155,1276,1055,1056,1277,1278,1279,1280,11,1281,1282,1283,99,1284,1285,354,896,678,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,511,1296,1297,1298,412,1299,579,1300,488,1301,123,379,203,813,1302,1303,691,1304,1305,1217,1306,1307,1281,1308,1309,1310,1311,1244,678,1312,1313,743,1314,1315,79,1316,203,1317,349,1318,1319,1320,1321,1322,635,1323,1324,1325,1326,1327,1328,761,762,344,400,1329,1330,1022,1331,1332,1333,1334,14,1112,678,1335,1336,47,1337,1338,123,1339,168,1340,1341,1342,1343,1344,99,1345,1346,1347,1348,809,233,234,1349,1350,1351,1352,1353,548,93,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,89,1364,99,1365,1366,1367,1368,1369,1370,1371,1372,369,1373,1374,1375,1376,1377,1378,813,1379,691,1380,1381,99,1382,1383,1384,
1385,1386,403,1226,1387,1157,1388,1389,1390,233,234,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,14,1416,405,1417,1418,123,1419,1420,1421,1418,1422,1423,845,1424,1425,1426,1427,1428,1001,1429,200,1430,1072,1431,1432,1433,763,1434,1435,1436,1437,1438,1439,936,1440,356,93,1441,1442,1443,1437,1444,1445,1446,1447,1448,379,1449,1450,309,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,403,52,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,438,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,616,617,1486,1487,381,1488,1489,1490,1491,1492,363,93,1070,1493,1494,1495,1496,1497,1498,1499,1120,888,1500,1501,1502,1503,1504,1505,1506,1507,368,1508,1509,1049,1510,1511,1512,691,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,870,233,234,1523,1524,813,1525,1526,1527,1528,1529,1300,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,540,1545,1546,1547,967,1548,1549,1550,1551,99,1552,1544,1107,1553,1554,1555,1556,1557,1558,1459,1347,1559,1560,976,1282,1561,1562,1563,1564,853,1565,1566,1567,1568,1569,1570,1055,1087,1571,1572,1573,1574,1575,1576,983,655,1577,1578,1579,1580,1581,1582,1583,1584,1585,403,395,1586,1587,1588,1589,1590,1591,1592,11,12,146,52,1593,1594,1595,1596,724,1597,1598,1599,381,1600,142,190,1601,1602,1603,1604,775,253,173,1605,1606,1607,1608,1609,1610,34,35,1611,1612,724,1613,1614,870,233,234,1615,888,1616,1617,940,1618,1619,1620,1621,51,52,1622,1623,1624,1625,1626,540,1627,1628,1109,349,1629,1630,1288,1631,1632,1633,1634,1635,1636,763,1637,1232,1638,1639,1640,1641,1642,1643,1644,1645,1646,1647,1648,1649,1200,1650,1651,1652,1653,1654,1655,1656,1210,1657,1658,1659,1660,1661,1662,1663,1664,1665,1282,1666,1667,1668,540,1669,1670,1671,1672,625,1673,321,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1089,52,1685,919,1686,1687,1688,1689,1690,1691,1692,1338,123,1693,1694,1695,1696,1697,47,1698,1699,1700,1701,1702,1703,1241,1704,1705,666
,1706,1707,1130,1708,1709,1710,1711,1712,1713,1487,1714,1715,327,1716,1717,1718,1719,1720,73,1721,1381,1722,1723,39,1724,1725,1726,1727,799,12,1728,1729,919,1730,153,1731,1732,1733,1190,1734,1735,1736,1737,324,1738,1739,1740,1741,1742,1743,1744,1745,1746,364,1747,1748,1749,1750,1751,1515,1752,1753,1754,1755,1756,131,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,960,1767,1768,1769,1770,1771,1772,864,1773,349,670,1774,1775,1776,311,1777,826,1778,1779,1780,1781,379,1775,1782,1783,1784,691,1785,1606,1786,1787,1788,1789,540,1790,1791,413,1792,52,1793,1794,1795,1796,1232,1797,1798,1799,1800,1801,1802,230,153,1803,1804,1805,1806,1807,1808,1809,1810,467,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1435,1006,1821,540,1822,1823,1824,1825,1826,1827,161,1828,1829,1830,454,1831,1832,1833,1834,1065,1835,1836,1837,1838,921,1839,52,14,1840,1841,1842,1843,1844,1845,1846,349,1847,1848,1849,1850,1851,1852,1853,513,93,1854,1855,1856,1857,1459,1858,1859,1860,1861,1862,1863,1443,1864,1865,1301,655,1866,1867,1868,1869,1870,1871,1872,823,617,1873,39,203,1874,1875,1055,1582,1876,1877,1104,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,667,1888,1594,1889,1281,1890,1087,1891,1892,1276,1089,1893,1894,1895,1896,1897,1898,1899,1900,813,1901,1902,744,1903,1904,345,1905,1906,1907,908,1908,1909,1547,114,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1371,47,1922,1923,1746,93,1924,326,1925,1926,119,1927,1928,1929,1686,783,1930,1931,1409,1932,755,1933,1934,1935,1936,1937,114,1938,1939,1940,203,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1022,1799,1955,623,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1624,794,1968,1086,1087,1969,1301,123,1970,1971,1972,1973,1974,1975,89,1976,1977,1978,292,1336,1979,1980,1981,1982,1983,1984,1985,314,1986,1987,1988,1075,1989,1990,1991,817,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,99,2004,2005,324,2006,786,52,2007,2008,2009,2010,871,540,2011,254,1438,2012,1582,2013,2014,1082,2015,2016,18,
2017,2018,2019,2020,6,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,799,12,1930,1091,871,381,2031,2032,2033,2034,2035,349,2036,2037,2038,2039,2040,1019,1147,2041,2042,2043,2044,245,173,678,2045,868,2046,1420,2047,2048,2049,2027,1854,1274,2050,2051,2052,254,2053,2054,2055,234,2056,2057,25,2058,2059,2060,2061,2062,2063,2064,2065,882,1190,2066,632,361,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,168,2080,1232,1157,2081,2082,960,2083,1319,2084,2085,2086,2087,2088,2089,969,473,2090,635,2091,2092,47,2093,2094,2095,1577,2096,2097,2098,142,2099,2100,2101,2102,2103,2104,2105,12,1919,2106,2107,2108],"category":[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1
,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1
,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],"subcategory":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]},"stringArray":["0x7f7021f6308b","sign_44","0x91b19","0x913b6","0x905aa","0x72bdd","0x92883","libc.so.6","0x1a0998","0x72ea4","0x92aa8","0x11e1e","0x9051f","0x73e38","0xce94","0x9484f","0x1a1004","0x903ad","0x6c112","0x74037","0x70339","0x9046a","0x736b7","0x11cdc","0x939c7","0x93857","0x93e8f","0x1a0745","0x949c9","0x72129","0x98f6a","0x98d1a","0x732b2","0x91e2a","0x6d1f5","0x9243a","0x73879","0x78c34","0x96887","0x92ab0","0x10db4","0x995c9","0x94d79","0x9141f","0x6e567","0x92fe5","0xf150","0x6b19c","0x92ae3","0x1a1009","0x98446","0x93b47","0x948dc","0x73f0a","0x78c14","0x923b7","0x92b43","0x11c6b","0x9284c","0x90873","0x9a343","0x98a0e","0x70009","0x92f24","0x9841e","0x92fcd","0xde2a","0x98d20","0x70530","0x121e1","0x93cd7","0x92aa0","0x115e0","0x92ab8","0x105bd","0xf3b5","0x11843","0x6e4dd","0x9342d","0x92ac0","0xf9e3","0xf9bc","0x6bd51","0x923d3","0x6a87d","0x98cb3","0x72adc","0x98992","0x989cb","0x9139a","0x94f17","0xed6e","0x10be0","0
x92b52","0x732cc","0x78b89","0x91abf","0x9364a","0x98c2f","0x98a1a","0x94ef7","0x1a09ad","0x9535a","0x93488","0x1a0977","0x7156c","0x94f1f","0xf17f","0x1208c","0x8f322","0x9928d","0x98c9c","0x71959","0x91e14","0x113ad","0x73495","0x78c3a","0x91956","0xfc6f","0x11444","0x957c5","0x1a100e","0xda15","0x98bfe","0x73756","0x78be4","0x101b4","0x9493c","0xec90","0x70b90","0x9882a","0x91a77","0x9722d","0x1a0a2c","0x98bd3","0x91ab2","0x70c2a","0x92909","0x11bfa","0x73c57","0x92fc5","0xd055","0x98982","0x9491c","0x1a09a2","0x9018d","0x9534d","0x12012","0x733e4","0x1066a","0xf1e7","0x122be","0x981ed","0x94c87","0x90417","0x1a09fc","0x11a8e","0x73e46","0xda73","0x94d8a","0x9343f","0x93eb2","0x9a049","0x736ff","0x11c61","0x9492c","0xd7f7","0x6b9a9","0xd09c","0x94e29","0x1a098a","0x933d9","0xece6","0x933e8","0x73685","0x12ab4","0x92a0b","0x90ac8","0x9a13b","0x989b0","0x9535f","0x92b1f","0x7073b","0x9214b","0x952f9","0x6ddc0","0x93689","0x94772","0x90848","0x98a14","0xfe1d","0x93b6a","0xf0a5","0x8f7b3","0x11229","0x98a96","0x957b5","0x70c6e","0x92a96","0x1116b","0x11a2b","0x124b9","0x72309","0x94ec1","0x92a98","0x1a0985","0x93109","0x951d9","0x91aa4","0x6b846","0x121b4","0x97fa1","0x73dcc","0x113f6","0x92b3e","0xfaf9","0x91337","0x11dbd","0x10041","0x72b5e","0x109db","0x708e2","0x9a0c8","0x92473","0x10c8b","0x956e7","0x6a51f","0x98c03","0x9128a","0x6d92a","0x98a99","0x947c7","0x9a358","0x11b53","0x947ef","0x78b12","0x12abb","0x94d95","0x6b151","0xef26","0xf285","0x7218b","0x91d01","0x1237b","0x93038","0x1003d","0x92339","0x732a6","0x94f99","0x94ed0","0x12356","0x98d02","0x6c383","0xeaae","0x11353","0x73a84","0x12ab8","0x99584","0x91c01","0xf64b","0x12a78","0x98a31","0x8f9d3","0x98d7b","0x6b7fe","0x118a3","0xfba4","0x985b3","0x11c70","0x94f07","0xdb91","0x70701","0x11847","0x10f58","0x73226","0xce82","0x98324","0x90f8c","0x98c80","0x94c18","0x73ed6","0x115a1","0x736f1","0x91951","0x9199b","0x11bbe","0x950bf","0x91a1d","0x91944","0x6ca3f","0x1a1033","0x94934","0xe285","0x91cca","0x6
d546","0x98cb8","0x94c94","0x90a9d","0x993ab","0x108a9","0x9676f","0xd08d","0x929b0","0x11a33","0x12042","0x73882","0x92406","0x934e4","0x989fe","0x94d70","0x73769","0x78c04","0xfcf5","0x129b0","0x94814","0x94f0f","0xe54b","0x9105b","0x93349","0x1230d","0x93677","0x96b78","0x126e3","0x1a1013","0x73af0","0x932b9","0x73b86","0x91b4d","0x11c5c","0x11ce1","0x11cbe","0x9584e","0x8ff91","0x1170d","0x1276b","0x733a8","0x924bd","0x10b85","0x92249","0x11a64","0x98a3f","0x78c84","0x97e66","0xf5e0","0x9480c","0x114b8","0x110ea","0x124c5","0x98ae5","0x9a1e9","0x94e5f","0x1a1036","0x10479","0x92427","0x1081b","0x6ed26","0xf063","0x912b9","0x705ea","0x94d9e","0x11653","0x94944","0xeff9","0x6a60e","0x943f0","0x73df2","0x78b8d","0x91976","0x71cdc","0x92fdd","0xedd6","0x98d2e","0x91c38","0x92881","0x6b79f","0x9a2b1","0xede9","0x10141","0x73fcb","0x91d55","0x92ad3","0x1a0980","0x93782","0x1a096e","0x9672c","0x6c025","0x985a2","0x9376c","0x91303","0x91e71","0x92415","0x120f5","0x98c54","0x94883","0x954b0","0x6dad9","0x98ce1","0x78c18","0x10de8","0x92e24","0x90e95","0xfca7","0x98b24","0x98eb5","0x94ff8","0x73bc2","0x94c99","0x70626","0x10259","0x9941e","0x112df","0x1031f","0x722e5","0x10ae1","0x1a0991","0x989c3","0x989fb","0xf2ed","0x91324","0xef2b","0x70ee4","0xf03c","0x11b67","0x91b2b","0x11c7a","0x974dc","0x10575","0xf8dd","0xf8f5","0x102d3","0xfe29","0x97fe5","0x1a0866","0x94924","0xcf0e","0x91adf","0x92da4","0x8fe6c","0x11da5","0xea4b","0x73822","0x737e7","0x78c2d","0x91b87","0x10f4d","0xf3d5","0x91922","0x91141","0xf850","0x11ccb","0x91c77","0x6e10d","0x953c0","0x6cd9d","0xda4b","0x98c2c","0x7392d","0x95bb5","0x10361","0x9842f","0x73b55","0x1138d","0x1046d","0x735c8","0x94d8f","0x8f938","0x10e8b","0x983bc","0x94eff","0xd5bd","0x73c76","0x92fd5","0xe42f","0xd979","0x73b60","0x78bf4","0x98a58","0x948c2","0x1072f","0x115f0","0x71cc2","0x91b3d","0x6d303","0x93337","0x98b94","0x11e16","0x11cb0","0x9459c","0xd3c4","0xfb87","0xce9c","0x10a2d","0x726ac","0x936ee","0x992e4","0x9344e","0x1
16dc","0x92c4f","0x98b60","0x96369","0x11336","0x12446","0xfce0","0xe67d","0x112a4","0x11982","0x91b6c","0x91ec2","0x70e66","0x9939a","0x99341","0x91a84","0xd868","0xf3c7","0x93b12","0x6ae67","0xd5e5","0x92838","0x92142","0x117af","0x7135e","0xf60a","0x7191a","0xdf01","0x91b5f","0xf8e9","0x99592","0x9545f","0x9235b","0x728fc","0x95257","0x71d67","0x11757","0x94ea2","0x92392","0x98564","0xde45","0x98847","0xd7bd","0x953fd","0x6d37b","0xf7e8","0x1a097b","0x917f8","0xdd39","0x91aec","0x6edc4","0x94d7d","0x6baf5","0x93288","0x73aaa","0xe83f","0x948f0","0x10937","0x10e0a","0x9118b","0x98a68","0x72e31","0x1012e","0x72737","0x1009f","0x93759","0x1a0730","0x98abe","0x91b9a","0x95365","0x91a5d","0x8fb8c","0x11225","0x9347a","0x70804","0x948e7","0x6a7dc","0x11b61","0x1109c","0x12383","0x11ede","0xf665","0x933df","0x92211","0x102c0","0x6e0e3","0x92fa3","0x10449","0x92b3a","0xd092","0x94df7","0x994b8","0x73955","0x70027","0x950c8","0x935d8","0x9a3ec","0xcf25","0x977cb","0x126de","0x94d38","0x91c89","0x708f7","0xfa69","0x10699","0x92ef3","0x94cf8","0x95818","0xeab3","0x8fd1b","0x1015d","0x1a0918","0x71685","0xf2e8","0x11456","0x72b64","0x93e7d","0x94a54","0x91b5a","0x93cd0","0x1a0875","0xff45","0x7338a","0x78c08","0x94a68","0x993e7","0x111d1","0x932b4","0x934d3","0x99232","0xd45b","0xd537","0x73fa7","0x1a0937","0x11bcf","0x9234b","0x120f0","0x99503","0xf419","0x952f4","0x6b02c","0xf498","0x1217d","0x71b03","0x9108d","0xd1f5","0x922a5","0x1243d","0x106a0","0x6d9f4","0x993c4","0x98492","0x103e3","0x10231","0xd05a","0xcf96","0x9540d","0x91c26","0x6b789","0x10555","0xfb8b","0x78be8","0x922fa","0x1135c","0x12148","0x91c5b","0x11fae","0x11398","0x6f9af","0x9645b","0xfbfb","0x6f332","0x9a39d","0x98bbf","0x929fb","0x1a0a0a","0x1a09a6","0x98a4a","0x977c2","0x984a0","0x7390e","0x10ca1","0xf655","0x73c9e","0x78c24","0x90e5a","0x91d2e","0x932a7","0x11de9","0x72eee","0x94567","0x95278","0x919bf","0x9476c","0x978b9","0x12612","0x9483f","0x1a0a2f","0x91504","0x6af4f","0x98a4d","0xea7e","0x92b2
8","0x11f45","0x6fa8a","0x9431a","0x6a2a6","0x1222a","0x93187","0x1155e","0x94e85","0x6ff50","0x98a47","0x92e40","0x6cf4f","0xec88","0xfab2","0x98438","0x98b69","0x985f1","0x92eea","0x11bf0","0x901e6","0x11b3e","0xe5d3","0x731ba","0xfa64","0x6d9a9","0x92859","0x10d9e","0x73ece","0x9837e","0xfaef","0x6cd78","0x9482c","0xf8af","0x10d13","0x11b7a","0xff13","0xd1e3","0x982a3","0x720d4","0xe313","0x92b24","0x948d4","0x73ae6","0xd2c8","0x9192b","0x92156","0xd1b4","0x10eb1","0x6d847","0x933c3","0x7152b","0x9343a","0x6f5c4","0x9899a","0x733b0","0x994da","0x94247","0x94133","0x98a52","0x941b4","0xdeb5","0x976d5","0xeb29","0xd6e1","0x73aba","0x78bb0","0x1a0f00","0x91a97","0x6dfed","0x984b4","0x1034f","0xfc8b","0x6fafd","0x73777","0x94b79","0x6b133","0xd790","0x93609","0x989ce","0x9a340","0xf5cd","0x12437","0x70f41","0x10711","0x94e39","0x8ffa1","0x10d19","0x71dbf","0x983b4","0x737c4","0x94bea","0x73e86","0x91068","0x11f9d","0x12a1d","0x935f7","0x7201e","0x1a1024","0xd48b","0x6b713","0x11a7c","0xed5c","0x73312","0x94c09","0x911aa","0x98b71","0x11864","0x8fa6e","0xfbdc","0x10d2f","0x6fa3d","0x937fe","0x78705","0x78b0b","0x922e8","0x112d2","0x1a0973","0x6deaa","0x94353","0xf8d7","0x6d248","0x98ba8","0x11f68","0x7007e","0x70c55","0x98a1f","0x73eb0","0x78c00","0xd5cf","0xecf8","0x9536e","0x6e06c","0x98a7b","0xe075","0x98c42","0x91c60","0x10bfd","0x95238","0x7085d","0x921e5","0x99302","0x98354","0x942db","0x924dd","0xfd1d","0x985bb","0x98b5b","0x104cf","0xcf2a","0xdc9d","0x10760","0x958e7","0xf639","0x9a07a","0x72a98","0x108a3","0x73c20","0xcf01","0xd650","0x98283","0x91c3d","0xee92","0x11b5c","0x72280","0x93c9d","0x9124f","0xdd29","0x1a092b","0xfdf4","0x1140a","0x6cce0","0x98c97","0x71ca8","0x78737","0x78b4a","0x1a0931","0x91118","0x91a07","0x98d90","0x92f2c","0x98c60","0x91bf2","0xd553","0x93cfa","0x95269","0x11234","0x92efb","0x936ca","0xef5f","0x91de7","0x6a4ad","0x1a0964","0xd177","0x8f6bf","0x1a091c","0x1a0911","0x91bc8","0x71448","0x9a31d","0x734bd","0x919b7","0x72c8b","0x93
5ca","0x12484","0x10125","0x9251c","0x11da0","0x6a819","0x98a17","0x98b84","0x11ecc","0xdb0b","0x91c8e","0x9426a","0x73d79","0xfb63","0x73407","0xdda4","0xd55f","0x99522","0x11aff","0x11d53","0x6dfa9","0x9a0e5","0x6e854","0x115fa","0x914a9","0xfe3c","0x6ba8b","0x93822","0x70dfc","0x982ca","0x9a3da","0xec15","0x90d92","0x1a1000","0x922a0","0x6b92c","0xd188","0x98c48","0x93224","0xd88b","0xe23f","0x1267f","0x99fa2","0x98cfd","0x71b52","0x943e8","0x94c04","0x92485","0x702dd","0x98bb3","0x981d5","0x913ff","0x8f4da","0x6e0be","0x126e9","0xddb3","0xe790","0x10e9d","0x1194c","0x91261","0x90e2a","0xdebf","0x6e4b6","0x6d081","0x11649","0x9a173","0x94c68","0x9244c","0x6a924","0xfa8f","0x73382","0x952e3","0xf2b8","0xe1d9","0x110d5","0x6d827","0x9431d","0x1a101a","0x6d00b","0x102e7","0x9a0ef","0x92f64","0x92327","0x9569f","0x1224e","0x11181","0xf221","0xd02a","0x725d6","0x988bc","0x10051","0x110e4","0x94d29","0x9587b","0x94e98","0x94ae9","0x6af8e","0x91cdf","0x1196e","0x6e07d","0x6ae78","0x92e0b","0x73c2b","0x950b9","0x94dd5","0x1a08fe","0x114a0","0x6f4a3","0xd65f","0x6d28d","0xfa42","0xffb7","0x98c6c","0x72e5e","0x119a6","0x6ea48","0x98527","0x94998","0xd18c","0x95b66","0x1a09ea","0xfb9b","0x1185c","0x1a09ef","0x8fe84","0x10155","0x92b94","0xf557","0x951d4","0x6b0b6","0x1092d","0x98b43","0x944d5","0x96bb0","0x90f5d","0x6e7fb","0x94824","0x11a16","0x6ba63","0x92b07","0xd54f","0x9a27f","0xe3af","0x11412","0x6d896","0x128bb","0x935ee","0x73773","0x91b30","0x95a2b","0xf351","0x92ff4","0xf7b8","0x788ea","0x12aab","0x937aa","0x73ad0","0x78bd0","0xe445","0x965a9","0x91b1e","0x1191c","0x1001d","0x10a53","0xd242","0x9122c","0x73f70","0x93589","0x725e6","0x94239","0x9481c","0x10a9d","0x98acb","0xe4bd","0x919e8","0x1238f","0x10b6f","0x6f239","0xee3f","0x70976","0x98a6b","0x941f8","0x9246e","0x11be1","0x92b36","0x6df01","0xdb15","0x73e18","0x78bcd","0xfc01","0x6ea85","0x91a69","0x7871e","0xfc5a","0x989eb","0x975de","0xfd7b","0x70c63","0x952e7","0x11d5e","0x91cb0","0x9373e","0x91523","0x8f
66c","0xf1b4","0x73320","0xdf5c","0x6e077","0xf32a","0x12736","0x98975","0x950a7","0x91613","0x73be5","0xfb15","0x99365","0x12441","0x946bb","0x124ad","0x6e629","0x928fc","0x70582","0x98fb9","0x9a08e","0x96787","0xee51","0x98ac3","0x7144b","0xd44b","0x91da2","0x12285","0x72244","0xd63f","0x936f2","0x97360","0xded3","0x6cd91","0x98606","0x98c0c","0x94ebc","0x95b1e","0x92251","0xf502","0x11a9a","0x9839d","0x966cd","0x72e3f","0x911df","0x74018","0x71350","0x94547","0x116aa","0x705f2","0x9356a","0x98b3b","0x11e0a","0x71b2a","0x99415","0xe5d9","0x6b093","0x99437","0xf756","0xe477","0x9958d","0x93429","0x98cad","0x94f32","0x958b1","0xf35f","0x11c4f","0x6d298","0x991d5","0x7398a","0x91bed","0x925c7","0x12a0b","0x123e6","0xe703","0x94bf7","0x9176a","0x91d37","0x6ed9a","0x92fb8","0x11e04","0xf4ea","0x95264","0x9835c","0x9366a","0x90d08","0x12526","0x6c3d5","0x78753","0x11d58","0x103f3","0x123b8","0x942ae","0x72959","0x91bdf","0x1293a","0x71517","0x73695","0x78b7f","0x1a098e","0xeab9","0x1134b","0xd6e5","0x8f2bb","0x10c99","0x6a643","0x10d27","0x98cf4","0xdb9b","0x9a367","0xf8d2","0x71b87","0x6c683","0xe43d","0xd136","0x1a09e5","0xf936","0x6af60","0x6ec3f","0x10829","0x989f5","0xcfd4","0x71c7e","0x98b55","0x78c1b","0x6c1c4","0x10603","0x12a99","0x9398d","0x948e1","0x78c29","0x6c294","0xf6b2","0x72ae7","0x98e7b","0xd17f","0x95029","0x97ab3","0x10346","0x7138f","0x989ab","0x98b9a","0x73f2d","0x10681","0x94229","0xd800","0x8f34a","0x9a2eb","0x993bf","0x97396","0x10af7","0x92fbb","0x6f8bd","0xdf3b","0xd5b9","0x9551c","0x6ba94","0x108bf","0x95340","0x6e7e4","0x98a70","0x98d4e","0xfff6","0xfc87","0x900c9","0x121ca","0x732d8","0xf50a","0x116a4","0x94513","0x10661","0x70222","0xd1ac","0x91b7a","0x1a099d","0xea2b","0x12909","0x9a2c5","0xdd2d","0xce98","0xe6f5","0x90fa5","0x98255","0xf2fb","0xd113","0x942c4","0x98b02","0x11526","0x10365","0x6c6c3","0x99359","0x6e759","0x100c3","0x11800","0x9129a","0x8f684","0x100c7","0x92d60","0x98cb0","0x73e9d","0x93ca2","0x902c7","0x6c0f6","0x73b7b",
"0x6f792","0x994c2","0xfb01","0x6e6eb","0x933a8","0x989b6","0x98b78","0x943e5","0x1077f","0x900e1","0x934bb","0x6cc01","0x98502","0x78bec","0x98a55","0x982c2","0x10c6e","0xf876","0x8fe06","0xf8ed","0x6e684","0x98b1c","0x98def","0x9283b","0x93c1f","0x9a0aa","0x714d1","0x1a08fb","0x6ba53","0x9a102","0xeddb","0x6a609","0x73377","0x7013e","0x9a125","0x94d17","0x1a0737","0x95308","0x8f7fc","0x118cc","0x7100c","0xd3c0","0x944e1","0x117a3","0x98b4c","0x94c0f","0xd4e1","0x94a5f","0x10793","0x6fd16","0x982dd","0x122fb","0x995aa","0xf20e","0x96ccf","0x92f74","0x6d85a","0x1236c","0x93e9d","0x949b7","0x97a42","0xf8c5","0x6e0b8","0x98736","0x98317","0x73af9","0x94436","0x112ac","0x1223c","0x9882d","0xd87b","0x10945","0x72183","0x6f176","0x98a28","0x73b75","0xd5dd","0xff37","0x6d168","0x9436d","0x995e4","0x1258a","0xdea3","0x6dbc7","0x94ca8","0xd07c","0x98896","0xf4d5","0x6a8ea","0x73b8e","0x11ae1","0x6b7bd","0x113db","0xdc17","0x78bef","0x92623","0x91434","0x9226b","0x8ff3d","0x11828","0x6d821","0x989d8","0x118c0","0x72083","0x94526","0x72ebc","0x99519","0x94648","0x94293","0x8fd7f","0x11704","0x994e9","0x1a0a06","0x963b5","0x98a60","0x927d3","0x110da","0x933d4","0x11305","0x1266d","0x6f1bd","0x98b19","0x9a2f8","0x12701","0x9a294","0x9a132","0x92377","0x92943","0x8f7bb","0x11c3e","0x6e7dc","0xe1f7","0x12759","0x12aa8","0x98556","0x6f9a0","0xfe91","0x12432","0x94572","0x96740","0x92289","0x11287","0x6cc1c","0x98a8e","0x73b68","0x1117d","0x11092","0xe3b7","0xe293","0x6a830","0x91a36","0x92230","0x95c0c","0x1a09aa","0x6cba5","0x989a3","0x922ed","0x92de8","0xe7e5","0xe7c2","0x911ec","0x6aeb8","0x73c7f","0x9216c","0x91395","0x95149","0x92b49","0x114c2","0x7123f","0x90618","0x1a0969","0x12a82","0x959f5","0x7082e","0xf265","0x10f70","0x73ab6","0x121cf","0x6ebe8","0x94409","0x94305","0xe20d","0xefec","0xec03","0x8f529","0x10ad7","0x910ea","0x6f37b","0x7313e","0x9a325","0xd106","0xe77a","0x90e6d","0x107a3","0x10bf3","0x11380","0x119bc","0x11592","0x9964e","0x7131b","0xdfe2","0x98b3e","0x
d4c3","0xec56","0x6ba97","0xe8e7","0x904cc","0x6f795","0x120fb","0xd8f3","0x11597","0xd443","0x12308","0x6d196","0x120a3","0x11d46","0x7339d","0x12840","0xff2f","0x99542","0x7363b","0x941b1","0x1a0734","0x948cb","0x11626","0x12371","0x714f1","0x122b2","0x70f2c","0x12072","0x1156b","0x6f3f8","0xe6d7","0x91a72","0x6b934","0xf4fc","0x90ebd","0x922cd","0x12845","0x71ae3","0x91315","0xe7ff","0x7328c","0x9936e","0xf7d4","0x92fbd","0x70dee","0x6ae1e","0xdec3","0x10725","0x949c4","0x9931c","0x9202f","0x6d5f4","0x104f3","0x94398","0x972b8","0x106c8","0x6c047","0x9a1fd","0x126f5","0x12946","0xe3a1","0x1147a","0x6f3b6","0x11639","0x7343f","0x93577","0x9a009","0x90ed8","0x91fe9","0xe5ef","0x102c4","0x11bdc","0x9a021","0x736d1","0x78bc4","0x11852","0x11570","0x6eb9f","0x11cf0","0x10e6d","0x1a0ff3","0xe49f","0x6ece8","0x98252","0x994d4","0xf068","0xd1bc","0x936c6","0xf1a6","0x6cf42","0x10213","0x92f14","0x9259d","0xfac8","0x6ae29","0x6e789","0x996e9","0x6ff33","0x989f8","0x1163f","0x11c3a","0x11401","0x9a49c","0x117ea","0x708a3","0x6f34e","0x9279c","0xee1c","0x9278b","0x98233","0x12a8a","0x9299b","0x6f7a4","0x942b1","0x963a5","0x93445","0xed74","0x1a090b","0xd85b","0x72eff","0x98c23","0xe820","0x6a55b","0xdfee","0x91ad5","0x6fa93","0x11622","0xf569","0x927e3","0x73ff3","0xfdec","0x93194","0x78c10","0x98bf6","0x9195f","0x8f6d7","0x1a102e","0x12532","0x92d13","0x6cf9c","0x98a2e","0x73a94","0xdc05","0x95137","0x911a5","0x118ae","0xd073","0x963ce","0x948d8","0x109c5","0x6f6b8","0x105db","0xeb37","0xff33","0x6d79d","0x1069c","0xcf92","0x90072","0x6e644","0x125a8","0x8fd64","0xe948","0x73790","0x95a46","0x70daa","0x96e98","0x99448","0x6e561","0x103c7","0xd022","0x6b6d6","0x9a3af","0x6b4af","0x1060b","0x9355d","0x92686","0x98aee","0x90d77","0xd01a","0xfd15","0x93572","0xfc05","0x73185","0x91a54","0x11ef8","0x11ad7","0x96ac7","0x6a676","0x9858e","0x730aa","0x94578","0xe0ac","0x992b9","0x99319","0xfbbe","0x10a45","0x943ba","0xdcab","0x11f04","0xd272","0xd183","0x6f0cc","0xd00c","0x98cd8",
"0x94213","0x9677f","0x12a59","0x7002c","0x6a5bd","0x92d1c","0x119e4","0x93333","0xf6e4","0x95038","0x11a21","0x94155","0xd8f9","0xf829","0x10f3f","0x9a320","0x12036","0x1145c","0x93709","0x1088c","0x6dd10","0x10532","0x789a6","0x91084","0x9435b","0x99508","0x9254a","0x70752","0x941cc","0x9657b","0x98a78","0x11c2b","0x91ada","0x90466","0x736ab","0xfbe9","0xe429","0x119c7","0x6ee6e","0x94193","0x6a57c","0x116b4","0x78a1e","0x11bab","0x6de81","0x98d17","0x9149c","0x12237","0x71869","0xf130","0xd6db","0x9945e","0x9a1ca","0x72dd9","0x94737","0x98c10","0x789f6","0x919ae","0x11ccf","0x922c0","0x9539d","0xe29b","0x1a094a","0xf213","0x8f521","0x713c4","0x6e0de","0xd967","0x6cbdf","0x107fe","0x96fe2","0x129a4","0x90214","0x714a0","0x6d8d7","0x93420","0x6e0c6","0x10f55","0x98c75","0x96bcf","0x94442","0xd86d","0x9a2a0","0x976cc","0xec6c","0x6a67e","0x10243","0x71010","0xd673","0x10537","0x1a0fd2","0x73afe","0x6e055","0x1045b","0x9354a","0x98bc2","0x72b34","0xd4cd","0x70538","0xcf54","0x8f8d5","0x6e54a","0x98c91","0x126d1","0x714da","0x995ad","0xffc3","0x92d79","0x6ddf9","0x12833","0xdeb0","0xe9d5","0xd166","0x95821","0x11ce7","0x98513","0x6e305","0x94759","0x949d8","0x92762","0x70871","0x98a34","0x98b1f","0x91e45","0x93104","0x712da","0x91404","0xf6ca","0xde35","0x1250f","0x951c7","0x989c8","0x963c7","0x7277b","0x92893","0x6f43c","0x98bc5","0xf56f","0x91bac","0x8fdfd","0x93358","0x1261e","0x98bcd","0x12600","0x11beb","0x937b8","0xfda3","0x7059e","0x6ff8f","0x98c78","0x1a0a01","0x922b2","0x706b6","0xdf45","0x7259a","0x98c83","0x95860","0x1112c","0x6b390","0x10734","0x6ffcd","0x114ca","0xf9d1","0x98ecc","0x70c10","0x92e95","0x125ee","0x944d0","0x92463","0x8fa8e","0x103d5","0xe8c0","0x92fab","0x72eaa","0x94575","0x78a66","0x98af9","0x919f1","0x732f2","0x93d03","0x911b6","0x12514","0xfd9f","0x9934c","0x9852a","0x11127","0x6f33b","0x95158","0x707d4","0xe205","0x8f67c","0x6ec6a","0x72786","0x91caa","0xfa57","0x1119c","0x11c67","0x72d64","0x91c50","0x93e51","0x1a0762","0x99516","0x92
062","0x6e599","0x98424","0x11b3a","0x8f930","0x11b70","0x71c6f","0x9567b","0x129bc","0x716c5","0x6d2f8","0x9a051","0x948a5","0x113a4","0x7011f","0x9420a","0xf8fd","0x11735","0x98cc9","0x11896","0x1190a","0x70548","0x989db","0x9a162","0x6c0c9","0xfd0d","0x939c0","0x78b2f","0x1292e","0x71946","0x1094d","0x993bc","0x1067d","0xef91","0x945bf","0x9a26b","0x72791","0x9120c","0x6ef82","0x98a22","0x90605","0xd346","0x11767","0x943b4","0xdecb","0x78c0b","0x6cdd3","0x933c7","0x72206","0x99307","0xeb6a","0xcf9a","0x9127d","0x8fe0f","0xef19","0x11976","0x116ed","0x10613","0x9847b","0x1129c","0x10e23","0x6f3a0","0x9a1cd","0x921cd","0x923f3","0x944de","0x6abc2","0x9a36d","0xfffb","0x71a05","0x98588","0x104eb","0xdc35","0x91c13","0x8f7c3","0xfa7b","0xf866","0x72e56","0xe561","0xd6a3","0x96a58","0x108f3","0xec7d","0x6cc97","0x984cb","0x944ec","0x6ae51","0x93069","0xcef8","0x96d10","0x112f4","0x739b2","0x127e1","0x9470c","0x1a0fcd","0x94f94","0x91278","0x123d7","0x12822","0x12606","0x9a04e","0x923e0","0x10691","0x91169","0x6c16f","0x102db","0x93dba","0x99412","0x8fa23","0x6fa29","0x11198","0x98ae0","0xfa7f","0x127bd","0x918b8","0x6b87f","0x93344","0x91bb1","0xfb7f","0x6f453","0x9134f","0x6ca63","0x932c8","0x93546","0x12166","0x701fa","0x9047b","0x94b48","0x91d84","0x6f4d2","0x942cc","0x94ab8","0x94234","0x964e7","0xeb13","0x6fed8","0x10aef","0x6e79a","0x95552","0x6ec8c","0xdaf9","0x6f2b2","0x911ff","0x6e355","0x921a0","0x902a7","0xd557","0x7211d","0xf056","0x9a32a","0x73ea8","0x78bf0","0x78b59","0x125fb","0x91f3a","0x6eaeb","0x9833a","0x9a016","0x11014","0x70d09","0x1232b","0x70e0e","0xcf13","0x936ea","0x73788","0xed69","0x7036a","0x9a2d7","0x913cd","0x6d7f0","0x72cff","0x9178e","0x128a9","0x91c72","0x9007b","0x98b08","0x70a06","0x957eb","0xec70","0x72d29","0x9444e","0x94d24","0x9562a","0x118ff","0x8f7ab","0x1150e","0x11466","0x6f787","0x99354","0xfce8","0xd2f8","0xeebe","0x71d22","0x943c5","0x98379"],"tid":"1785864.1","unregisterTime":2160796743.709309}],"pages":[],"profilerOverhe
ad":[],"counters":[]} \ No newline at end of file diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Encoding.Signature.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Encoding.Signature.fst index 974a66ac7..3c0ff240a 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Encoding.Signature.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Encoding.Signature.fst @@ -198,13 +198,12 @@ let impl__deserialize <: Libcrux_ml_dsa.Types.t_VerificationError) <: - Core.Result.t_Result - (Libcrux_ml_dsa.Types.t_Signature v_SIMDUnit v_COMMITMENT_HASH_SIZE v_COLUMNS_IN_A v_ROWS_IN_A - ) Libcrux_ml_dsa.Types.t_VerificationError + Core.Result.t_Result (t_Signature v_SIMDUnit v_COMMITMENT_HASH_SIZE v_COLUMNS_IN_A v_ROWS_IN_A) + Libcrux_ml_dsa.Types.t_VerificationError else Core.Result.Result_Ok ({ - Libcrux_ml_dsa.Types.f_commitment_hash + f_commitment_hash = Core.Result.impl__unwrap #(t_Array u8 v_COMMITMENT_HASH_SIZE) #Core.Array.t_TryFromSliceError @@ -214,15 +213,14 @@ let impl__deserialize commitment_hash <: Core.Result.t_Result (t_Array u8 v_COMMITMENT_HASH_SIZE) Core.Array.t_TryFromSliceError); - Libcrux_ml_dsa.Types.f_signer_response = signer_response; - Libcrux_ml_dsa.Types.f_hint = hint + f_signer_response = signer_response; + f_hint = hint } <: - Libcrux_ml_dsa.Types.t_Signature v_SIMDUnit v_COMMITMENT_HASH_SIZE v_COLUMNS_IN_A v_ROWS_IN_A) + t_Signature v_SIMDUnit v_COMMITMENT_HASH_SIZE v_COLUMNS_IN_A v_ROWS_IN_A) <: - Core.Result.t_Result - (Libcrux_ml_dsa.Types.t_Signature v_SIMDUnit v_COMMITMENT_HASH_SIZE v_COLUMNS_IN_A v_ROWS_IN_A - ) Libcrux_ml_dsa.Types.t_VerificationError + Core.Result.t_Result (t_Signature v_SIMDUnit v_COMMITMENT_HASH_SIZE v_COLUMNS_IN_A v_ROWS_IN_A) + Libcrux_ml_dsa.Types.t_VerificationError let impl__serialize (#v_SIMDUnit: Type0) @@ -231,11 +229,7 @@ let impl__serialize (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit) - (self: - 
Libcrux_ml_dsa.Types.t_Signature v_SIMDUnit - v_COMMITMENT_HASH_SIZE - v_COLUMNS_IN_A - v_ROWS_IN_A) + (self: t_Signature v_SIMDUnit v_COMMITMENT_HASH_SIZE v_COLUMNS_IN_A v_ROWS_IN_A) = let signature:t_Array u8 v_SIGNATURE_SIZE = Rust_primitives.Hax.repeat 0uy v_SIGNATURE_SIZE in let offset:usize = sz 0 in @@ -256,7 +250,7 @@ let impl__serialize Core.Ops.Range.t_Range usize ] <: t_Slice u8) - (self.Libcrux_ml_dsa.Types.f_commitment_hash <: t_Slice u8) + (self.f_commitment_hash <: t_Slice u8) <: t_Slice u8) in @@ -292,7 +286,7 @@ let impl__serialize (Libcrux_ml_dsa.Encoding.Gamma1.serialize #v_SIMDUnit v_GAMMA1_EXPONENT v_GAMMA1_RING_ELEMENT_SIZE - (self.Libcrux_ml_dsa.Types.f_signer_response.[ i ] + (self.f_signer_response.[ i ] <: Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) <: @@ -316,18 +310,17 @@ let impl__serialize let signature, true_hints_seen:(t_Array u8 v_SIGNATURE_SIZE & usize) = temp_0_ in let i:usize = i in let signature, true_hints_seen:(t_Array u8 v_SIGNATURE_SIZE & usize) = - Rust_primitives.Hax.Folds.fold_enumerated_slice (self.Libcrux_ml_dsa.Types.f_hint.[ i ] - <: - t_Array i32 (sz 256)) + Rust_primitives.Hax.Folds.fold_range (sz 0) + (Core.Slice.impl__len #i32 (self.f_hint.[ i ] <: t_Slice i32) <: usize) (fun temp_0_ temp_1_ -> let signature, true_hints_seen:(t_Array u8 v_SIGNATURE_SIZE & usize) = temp_0_ in let _:usize = temp_1_ in true) (signature, true_hints_seen <: (t_Array u8 v_SIGNATURE_SIZE & usize)) - (fun temp_0_ temp_1_ -> + (fun temp_0_ j -> let signature, true_hints_seen:(t_Array u8 v_SIGNATURE_SIZE & usize) = temp_0_ in - let j, hint:(usize & i32) = temp_1_ in - if hint =. 1l <: bool + let j:usize = j in + if ((self.f_hint.[ i ] <: t_Array i32 (sz 256)).[ j ] <: i32) =. 
1l <: bool then let signature:t_Array u8 v_SIGNATURE_SIZE = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize signature diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Encoding.Signature.fsti b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Encoding.Signature.fsti index 946d0fb21..0ef8c6563 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Encoding.Signature.fsti +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Encoding.Signature.fsti @@ -9,6 +9,18 @@ let _ = let open Libcrux_ml_dsa.Simd.Traits in () +/// A signature +/// This is only an internal type. +type t_Signature + (v_SIMDUnit: Type0) (v_COMMITMENT_HASH_SIZE: usize) (v_COLUMNS_IN_A: usize) (v_ROWS_IN_A: usize) + {| i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} + = { + f_commitment_hash:t_Array u8 v_COMMITMENT_HASH_SIZE; + f_signer_response:t_Array (Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) + v_COLUMNS_IN_A; + f_hint:t_Array (t_Array i32 (sz 256)) v_ROWS_IN_A +} + val impl__deserialize (#v_SIMDUnit: Type0) (v_COMMITMENT_HASH_SIZE v_COLUMNS_IN_A v_ROWS_IN_A v_GAMMA1_EXPONENT v_GAMMA1_RING_ELEMENT_SIZE v_MAX_ONES_IN_HINT v_SIGNATURE_SIZE: @@ -17,21 +29,13 @@ val impl__deserialize (serialized: t_Array u8 v_SIGNATURE_SIZE) : Prims.Pure (Core.Result.t_Result - (Libcrux_ml_dsa.Types.t_Signature v_SIMDUnit - v_COMMITMENT_HASH_SIZE - v_COLUMNS_IN_A - v_ROWS_IN_A) Libcrux_ml_dsa.Types.t_VerificationError) - Prims.l_True - (fun _ -> Prims.l_True) + (t_Signature v_SIMDUnit v_COMMITMENT_HASH_SIZE v_COLUMNS_IN_A v_ROWS_IN_A) + Libcrux_ml_dsa.Types.t_VerificationError) Prims.l_True (fun _ -> Prims.l_True) val impl__serialize (#v_SIMDUnit: Type0) (v_COMMITMENT_HASH_SIZE v_COLUMNS_IN_A v_ROWS_IN_A v_GAMMA1_EXPONENT v_GAMMA1_RING_ELEMENT_SIZE v_MAX_ONES_IN_HINT v_SIGNATURE_SIZE: usize) {| i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} - (self: - Libcrux_ml_dsa.Types.t_Signature v_SIMDUnit - v_COMMITMENT_HASH_SIZE - 
v_COLUMNS_IN_A - v_ROWS_IN_A) + (self: t_Signature v_SIMDUnit v_COMMITMENT_HASH_SIZE v_COLUMNS_IN_A v_ROWS_IN_A) : Prims.Pure (t_Array u8 v_SIGNATURE_SIZE) Prims.l_True (fun _ -> Prims.l_True) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Hash_functions.Neon.fsti b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Hash_functions.Neon.fsti index 9ad6829f1..a7762dfe1 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Hash_functions.Neon.fsti +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Hash_functions.Neon.fsti @@ -8,6 +8,12 @@ val t_Shake128x4:Type0 /// Neon SHAKE 256 x4 state val t_Shake256x4:Type0 +[@@ FStar.Tactics.Typeclasses.tcinstance] +val impl:Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 t_Shake128x4 + +[@@ FStar.Tactics.Typeclasses.tcinstance] +val impl_1:Libcrux_ml_dsa.Hash_functions.Shake256.t_XofX4 t_Shake256x4 + /// Init the state and absorb 4 blocks in parallel. val init_absorb (input0 input1 input2 input3: t_Slice u8) : Prims.Pure t_Shake128x4 Prims.l_True (fun _ -> Prims.l_True) @@ -43,239 +49,9 @@ val squeeze_next_block (state: t_Shake128x4) Prims.l_True (fun _ -> Prims.l_True) -[@@ FStar.Tactics.Typeclasses.tcinstance] -let impl: Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 t_Shake128x4 = - { - f_init_absorb_pre - = - (fun (input0: t_Slice u8) (input1: t_Slice u8) (input2: t_Slice u8) (input3: t_Slice u8) -> true - ); - f_init_absorb_post - = - (fun - (input0: t_Slice u8) - (input1: t_Slice u8) - (input2: t_Slice u8) - (input3: t_Slice u8) - (out: t_Shake128x4) - -> - true); - f_init_absorb - = - (fun (input0: t_Slice u8) (input1: t_Slice u8) (input2: t_Slice u8) (input3: t_Slice u8) -> - init_absorb input0 input1 input2 input3); - f_squeeze_first_five_blocks_pre - = - (fun - (self: t_Shake128x4) - (out0: t_Array u8 (sz 840)) - (out1: t_Array u8 (sz 840)) - (out2: t_Array u8 (sz 840)) - (out3: t_Array u8 (sz 840)) - -> - true); - f_squeeze_first_five_blocks_post - = - (fun - (self: 
t_Shake128x4) - (out0: t_Array u8 (sz 840)) - (out1: t_Array u8 (sz 840)) - (out2: t_Array u8 (sz 840)) - (out3: t_Array u8 (sz 840)) - (out4: - (t_Shake128x4 & t_Array u8 (sz 840) & t_Array u8 (sz 840) & t_Array u8 (sz 840) & - t_Array u8 (sz 840))) - -> - true); - f_squeeze_first_five_blocks - = - (fun - (self: t_Shake128x4) - (out0: t_Array u8 (sz 840)) - (out1: t_Array u8 (sz 840)) - (out2: t_Array u8 (sz 840)) - (out3: t_Array u8 (sz 840)) - -> - let tmp0, tmp1, tmp2, tmp3, tmp4:(t_Shake128x4 & t_Array u8 (sz 840) & t_Array u8 (sz 840) & - t_Array u8 (sz 840) & - t_Array u8 (sz 840)) = - squeeze_first_five_blocks self out0 out1 out2 out3 - in - let self:t_Shake128x4 = tmp0 in - let out0:t_Array u8 (sz 840) = tmp1 in - let out1:t_Array u8 (sz 840) = tmp2 in - let out2:t_Array u8 (sz 840) = tmp3 in - let out3:t_Array u8 (sz 840) = tmp4 in - let _:Prims.unit = () in - self, out0, out1, out2, out3 - <: - (t_Shake128x4 & t_Array u8 (sz 840) & t_Array u8 (sz 840) & t_Array u8 (sz 840) & - t_Array u8 (sz 840))); - f_squeeze_next_block_pre = (fun (self: t_Shake128x4) -> true); - f_squeeze_next_block_post - = - (fun - (self: t_Shake128x4) - (out5: - (t_Shake128x4 & - (t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168))) - ) - -> - true); - f_squeeze_next_block - = - fun (self: t_Shake128x4) -> - let tmp0, out4:(t_Shake128x4 & - (t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168))) = - squeeze_next_block self - in - let self:t_Shake128x4 = tmp0 in - let hax_temp_output:(t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168) & - t_Array u8 (sz 168)) = - out4 - in - self, hax_temp_output - <: - (t_Shake128x4 & - (t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168))) - } - val squeeze_next_block_x4 (state: t_Shake256x4) : Prims.Pure (t_Shake256x4 & (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136))) Prims.l_True (fun _ -> 
Prims.l_True) - -[@@ FStar.Tactics.Typeclasses.tcinstance] -let impl_1: Libcrux_ml_dsa.Hash_functions.Shake256.t_XofX4 t_Shake256x4 = - { - f_init_absorb_x4_pre - = - (fun (input0: t_Slice u8) (input1: t_Slice u8) (input2: t_Slice u8) (input3: t_Slice u8) -> true - ); - f_init_absorb_x4_post - = - (fun - (input0: t_Slice u8) - (input1: t_Slice u8) - (input2: t_Slice u8) - (input3: t_Slice u8) - (out: t_Shake256x4) - -> - true); - f_init_absorb_x4 - = - (fun (input0: t_Slice u8) (input1: t_Slice u8) (input2: t_Slice u8) (input3: t_Slice u8) -> - init_absorb_x4 input0 input1 input2 input3); - f_squeeze_first_block_x4_pre = (fun (self: t_Shake256x4) -> true); - f_squeeze_first_block_x4_post - = - (fun - (self: t_Shake256x4) - (out5: - (t_Shake256x4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136))) - ) - -> - true); - f_squeeze_first_block_x4 - = - (fun (self: t_Shake256x4) -> - let tmp0, out4:(t_Shake256x4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136))) = - squeeze_first_block_x4 self - in - let self:t_Shake256x4 = tmp0 in - let hax_temp_output:(t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & - t_Array u8 (sz 136)) = - out4 - in - self, hax_temp_output - <: - (t_Shake256x4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136)))); - f_squeeze_next_block_x4_pre = (fun (self: t_Shake256x4) -> true); - f_squeeze_next_block_x4_post - = - (fun - (self: t_Shake256x4) - (out5: - (t_Shake256x4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136))) - ) - -> - true); - f_squeeze_next_block_x4 - = - (fun (self: t_Shake256x4) -> - let tmp0, out4:(t_Shake256x4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136))) = - squeeze_next_block_x4 self - in - let self:t_Shake256x4 = tmp0 in - let hax_temp_output:(t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 
136) & - t_Array u8 (sz 136)) = - out4 - in - self, hax_temp_output - <: - (t_Shake256x4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136)))); - f_shake256_x4_pre - = - (fun - (v_OUT_LEN: usize) - (input0: t_Slice u8) - (input1: t_Slice u8) - (input2: t_Slice u8) - (input3: t_Slice u8) - (out0: t_Array u8 v_OUT_LEN) - (out1: t_Array u8 v_OUT_LEN) - (out2: t_Array u8 v_OUT_LEN) - (out3: t_Array u8 v_OUT_LEN) - -> - true); - f_shake256_x4_post - = - (fun - (v_OUT_LEN: usize) - (input0: t_Slice u8) - (input1: t_Slice u8) - (input2: t_Slice u8) - (input3: t_Slice u8) - (out0: t_Array u8 v_OUT_LEN) - (out1: t_Array u8 v_OUT_LEN) - (out2: t_Array u8 v_OUT_LEN) - (out3: t_Array u8 v_OUT_LEN) - (out4: - (t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN - )) - -> - true); - f_shake256_x4 - = - fun - (v_OUT_LEN: usize) - (input0: t_Slice u8) - (input1: t_Slice u8) - (input2: t_Slice u8) - (input3: t_Slice u8) - (out0: t_Array u8 v_OUT_LEN) - (out1: t_Array u8 v_OUT_LEN) - (out2: t_Array u8 v_OUT_LEN) - (out3: t_Array u8 v_OUT_LEN) - -> - let tmp0, tmp1, tmp2, tmp3:(t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & - t_Array u8 v_OUT_LEN) = - shake256_x4 v_OUT_LEN input0 input1 input2 input3 out0 out1 out2 out3 - in - let out0:t_Array u8 v_OUT_LEN = tmp0 in - let out1:t_Array u8 v_OUT_LEN = tmp1 in - let out2:t_Array u8 v_OUT_LEN = tmp2 in - let out3:t_Array u8 v_OUT_LEN = tmp3 in - let _:Prims.unit = () in - out0, out1, out2, out3 - <: - (t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN) - } diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Hash_functions.Portable.fsti b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Hash_functions.Portable.fsti index c1b251529..0a59a5cc8 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Hash_functions.Portable.fsti +++ 
b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Hash_functions.Portable.fsti @@ -21,6 +21,18 @@ val t_Shake256Absorb:Type0 val t_Shake256Squeeze:Type0 +[@@ FStar.Tactics.Typeclasses.tcinstance] +val impl:Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 t_Shake128X4 + +[@@ FStar.Tactics.Typeclasses.tcinstance] +val impl_1:Libcrux_ml_dsa.Hash_functions.Shake128.t_Xof t_Shake128 + +[@@ FStar.Tactics.Typeclasses.tcinstance] +val impl_2:Libcrux_ml_dsa.Hash_functions.Shake256.t_Xof t_Shake256 + +[@@ FStar.Tactics.Typeclasses.tcinstance] +val impl_3:Libcrux_ml_dsa.Hash_functions.Shake256.t_XofX4 t_Shake256X4 + val init_absorb (input0 input1 input2 input3: t_Slice u8) : Prims.Pure t_Shake128X4 Prims.l_True (fun _ -> Prims.l_True) @@ -33,28 +45,6 @@ val init_absorb_x4 (input0 input1 input2 input3: t_Slice u8) val shake128 (v_OUTPUT_LENGTH: usize) (input: t_Slice u8) (out: t_Array u8 v_OUTPUT_LENGTH) : Prims.Pure (t_Array u8 v_OUTPUT_LENGTH) Prims.l_True (fun _ -> Prims.l_True) -[@@ FStar.Tactics.Typeclasses.tcinstance] -let impl_1: Libcrux_ml_dsa.Hash_functions.Shake128.t_Xof t_Shake128 = - { - f_shake128_pre - = - (fun (v_OUTPUT_LENGTH: usize) (input: t_Slice u8) (out: t_Array u8 v_OUTPUT_LENGTH) -> true); - f_shake128_post - = - (fun - (v_OUTPUT_LENGTH: usize) - (input: t_Slice u8) - (out: t_Array u8 v_OUTPUT_LENGTH) - (out1: t_Array u8 v_OUTPUT_LENGTH) - -> - true); - f_shake128 - = - fun (v_OUTPUT_LENGTH: usize) (input: t_Slice u8) (out: t_Array u8 v_OUTPUT_LENGTH) -> - let out:t_Array u8 v_OUTPUT_LENGTH = shake128 v_OUTPUT_LENGTH input out in - out - } - val shake256 (v_OUTPUT_LENGTH: usize) (input: t_Slice u8) (out: t_Array u8 v_OUTPUT_LENGTH) : Prims.Pure (t_Array u8 v_OUTPUT_LENGTH) Prims.l_True (fun _ -> Prims.l_True) @@ -91,284 +81,12 @@ val squeeze_next_block (state: t_Shake128X4) Prims.l_True (fun _ -> Prims.l_True) -[@@ FStar.Tactics.Typeclasses.tcinstance] -let impl: Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 t_Shake128X4 = - { - f_init_absorb_pre - 
= - (fun (input0: t_Slice u8) (input1: t_Slice u8) (input2: t_Slice u8) (input3: t_Slice u8) -> true - ); - f_init_absorb_post - = - (fun - (input0: t_Slice u8) - (input1: t_Slice u8) - (input2: t_Slice u8) - (input3: t_Slice u8) - (out: t_Shake128X4) - -> - true); - f_init_absorb - = - (fun (input0: t_Slice u8) (input1: t_Slice u8) (input2: t_Slice u8) (input3: t_Slice u8) -> - init_absorb input0 input1 input2 input3); - f_squeeze_first_five_blocks_pre - = - (fun - (self: t_Shake128X4) - (out0: t_Array u8 (sz 840)) - (out1: t_Array u8 (sz 840)) - (out2: t_Array u8 (sz 840)) - (out3: t_Array u8 (sz 840)) - -> - true); - f_squeeze_first_five_blocks_post - = - (fun - (self: t_Shake128X4) - (out0: t_Array u8 (sz 840)) - (out1: t_Array u8 (sz 840)) - (out2: t_Array u8 (sz 840)) - (out3: t_Array u8 (sz 840)) - (out4: - (t_Shake128X4 & t_Array u8 (sz 840) & t_Array u8 (sz 840) & t_Array u8 (sz 840) & - t_Array u8 (sz 840))) - -> - true); - f_squeeze_first_five_blocks - = - (fun - (self: t_Shake128X4) - (out0: t_Array u8 (sz 840)) - (out1: t_Array u8 (sz 840)) - (out2: t_Array u8 (sz 840)) - (out3: t_Array u8 (sz 840)) - -> - let tmp0, tmp1, tmp2, tmp3, tmp4:(t_Shake128X4 & t_Array u8 (sz 840) & t_Array u8 (sz 840) & - t_Array u8 (sz 840) & - t_Array u8 (sz 840)) = - squeeze_first_five_blocks self out0 out1 out2 out3 - in - let self:t_Shake128X4 = tmp0 in - let out0:t_Array u8 (sz 840) = tmp1 in - let out1:t_Array u8 (sz 840) = tmp2 in - let out2:t_Array u8 (sz 840) = tmp3 in - let out3:t_Array u8 (sz 840) = tmp4 in - let _:Prims.unit = () in - self, out0, out1, out2, out3 - <: - (t_Shake128X4 & t_Array u8 (sz 840) & t_Array u8 (sz 840) & t_Array u8 (sz 840) & - t_Array u8 (sz 840))); - f_squeeze_next_block_pre = (fun (self: t_Shake128X4) -> true); - f_squeeze_next_block_post - = - (fun - (self: t_Shake128X4) - (out5: - (t_Shake128X4 & - (t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168))) - ) - -> - true); - f_squeeze_next_block - = - 
fun (self: t_Shake128X4) -> - let tmp0, out4:(t_Shake128X4 & - (t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168))) = - squeeze_next_block self - in - let self:t_Shake128X4 = tmp0 in - let hax_temp_output:(t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168) & - t_Array u8 (sz 168)) = - out4 - in - self, hax_temp_output - <: - (t_Shake128X4 & - (t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168))) - } - val squeeze_next_block_shake256 (state: t_Shake256) : Prims.Pure (t_Shake256 & t_Array u8 (sz 136)) Prims.l_True (fun _ -> Prims.l_True) -[@@ FStar.Tactics.Typeclasses.tcinstance] -let impl_2: Libcrux_ml_dsa.Hash_functions.Shake256.t_Xof t_Shake256 = - { - f_shake256_pre - = - (fun (v_OUTPUT_LENGTH: usize) (input: t_Slice u8) (out: t_Array u8 v_OUTPUT_LENGTH) -> true); - f_shake256_post - = - (fun - (v_OUTPUT_LENGTH: usize) - (input: t_Slice u8) - (out: t_Array u8 v_OUTPUT_LENGTH) - (out1: t_Array u8 v_OUTPUT_LENGTH) - -> - true); - f_shake256 - = - (fun (v_OUTPUT_LENGTH: usize) (input: t_Slice u8) (out: t_Array u8 v_OUTPUT_LENGTH) -> - let out:t_Array u8 v_OUTPUT_LENGTH = shake256 v_OUTPUT_LENGTH input out in - out); - f_init_absorb_pre = (fun (input: t_Slice u8) -> true); - f_init_absorb_post = (fun (input: t_Slice u8) (out: t_Shake256) -> true); - f_init_absorb = (fun (input: t_Slice u8) -> init_absorb_shake256 input); - f_squeeze_first_block_pre = (fun (self: t_Shake256) -> true); - f_squeeze_first_block_post - = - (fun (self: t_Shake256) (out2: (t_Shake256 & t_Array u8 (sz 136))) -> true); - f_squeeze_first_block - = - (fun (self: t_Shake256) -> - let tmp0, out1:(t_Shake256 & t_Array u8 (sz 136)) = squeeze_first_block_shake256 self in - let self:t_Shake256 = tmp0 in - let hax_temp_output:t_Array u8 (sz 136) = out1 in - self, hax_temp_output <: (t_Shake256 & t_Array u8 (sz 136))); - f_squeeze_next_block_pre = (fun (self: t_Shake256) -> true); - f_squeeze_next_block_post - = - (fun 
(self: t_Shake256) (out2: (t_Shake256 & t_Array u8 (sz 136))) -> true); - f_squeeze_next_block - = - fun (self: t_Shake256) -> - let tmp0, out1:(t_Shake256 & t_Array u8 (sz 136)) = squeeze_next_block_shake256 self in - let self:t_Shake256 = tmp0 in - let hax_temp_output:t_Array u8 (sz 136) = out1 in - self, hax_temp_output <: (t_Shake256 & t_Array u8 (sz 136)) - } - val squeeze_next_block_x4 (state: t_Shake256X4) : Prims.Pure (t_Shake256X4 & (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136))) Prims.l_True (fun _ -> Prims.l_True) - -[@@ FStar.Tactics.Typeclasses.tcinstance] -let impl_3: Libcrux_ml_dsa.Hash_functions.Shake256.t_XofX4 t_Shake256X4 = - { - f_init_absorb_x4_pre - = - (fun (input0: t_Slice u8) (input1: t_Slice u8) (input2: t_Slice u8) (input3: t_Slice u8) -> true - ); - f_init_absorb_x4_post - = - (fun - (input0: t_Slice u8) - (input1: t_Slice u8) - (input2: t_Slice u8) - (input3: t_Slice u8) - (out: t_Shake256X4) - -> - true); - f_init_absorb_x4 - = - (fun (input0: t_Slice u8) (input1: t_Slice u8) (input2: t_Slice u8) (input3: t_Slice u8) -> - init_absorb_x4 input0 input1 input2 input3); - f_squeeze_first_block_x4_pre = (fun (self: t_Shake256X4) -> true); - f_squeeze_first_block_x4_post - = - (fun - (self: t_Shake256X4) - (out5: - (t_Shake256X4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136))) - ) - -> - true); - f_squeeze_first_block_x4 - = - (fun (self: t_Shake256X4) -> - let tmp0, out4:(t_Shake256X4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136))) = - squeeze_first_block_x4 self - in - let self:t_Shake256X4 = tmp0 in - let hax_temp_output:(t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & - t_Array u8 (sz 136)) = - out4 - in - self, hax_temp_output - <: - (t_Shake256X4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136)))); - f_squeeze_next_block_x4_pre = (fun (self: 
t_Shake256X4) -> true); - f_squeeze_next_block_x4_post - = - (fun - (self: t_Shake256X4) - (out5: - (t_Shake256X4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136))) - ) - -> - true); - f_squeeze_next_block_x4 - = - (fun (self: t_Shake256X4) -> - let tmp0, out4:(t_Shake256X4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136))) = - squeeze_next_block_x4 self - in - let self:t_Shake256X4 = tmp0 in - let hax_temp_output:(t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & - t_Array u8 (sz 136)) = - out4 - in - self, hax_temp_output - <: - (t_Shake256X4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136)))); - f_shake256_x4_pre - = - (fun - (v_OUT_LEN: usize) - (input0: t_Slice u8) - (input1: t_Slice u8) - (input2: t_Slice u8) - (input3: t_Slice u8) - (out0: t_Array u8 v_OUT_LEN) - (out1: t_Array u8 v_OUT_LEN) - (out2: t_Array u8 v_OUT_LEN) - (out3: t_Array u8 v_OUT_LEN) - -> - true); - f_shake256_x4_post - = - (fun - (v_OUT_LEN: usize) - (input0: t_Slice u8) - (input1: t_Slice u8) - (input2: t_Slice u8) - (input3: t_Slice u8) - (out0: t_Array u8 v_OUT_LEN) - (out1: t_Array u8 v_OUT_LEN) - (out2: t_Array u8 v_OUT_LEN) - (out3: t_Array u8 v_OUT_LEN) - (out4: - (t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN - )) - -> - true); - f_shake256_x4 - = - fun - (v_OUT_LEN: usize) - (input0: t_Slice u8) - (input1: t_Slice u8) - (input2: t_Slice u8) - (input3: t_Slice u8) - (out0: t_Array u8 v_OUT_LEN) - (out1: t_Array u8 v_OUT_LEN) - (out2: t_Array u8 v_OUT_LEN) - (out3: t_Array u8 v_OUT_LEN) - -> - let out0:t_Array u8 v_OUT_LEN = shake256 v_OUT_LEN input0 out0 in - let out1:t_Array u8 v_OUT_LEN = shake256 v_OUT_LEN input1 out1 in - let out2:t_Array u8 v_OUT_LEN = shake256 v_OUT_LEN input2 out2 in - let out3:t_Array u8 v_OUT_LEN = shake256 v_OUT_LEN input3 out3 in - out0, out1, out2, out3 - <: - (t_Array 
u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN) - } diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Hash_functions.Simd256.fsti b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Hash_functions.Simd256.fsti index 97db532b4..32174758b 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Hash_functions.Simd256.fsti +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Hash_functions.Simd256.fsti @@ -14,6 +14,15 @@ val t_Shake256x4:Type0 /// AVX2 SHAKE 256 state val t_Shake256:Type0 +[@@ FStar.Tactics.Typeclasses.tcinstance] +val impl:Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 t_Shake128x4 + +[@@ FStar.Tactics.Typeclasses.tcinstance] +val impl_1:Libcrux_ml_dsa.Hash_functions.Shake256.t_Xof t_Shake256 + +[@@ FStar.Tactics.Typeclasses.tcinstance] +val impl_2:Libcrux_ml_dsa.Hash_functions.Shake256.t_XofX4 t_Shake256x4 + /// Init the state and absorb 4 blocks in parallel. val init_absorb (input0 input1 input2 input3: t_Slice u8) : Prims.Pure t_Shake128x4 Prims.l_True (fun _ -> Prims.l_True) @@ -58,291 +67,12 @@ val squeeze_next_block (state: t_Shake128x4) Prims.l_True (fun _ -> Prims.l_True) -[@@ FStar.Tactics.Typeclasses.tcinstance] -let impl: Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 t_Shake128x4 = - { - f_init_absorb_pre - = - (fun (input0: t_Slice u8) (input1: t_Slice u8) (input2: t_Slice u8) (input3: t_Slice u8) -> true - ); - f_init_absorb_post - = - (fun - (input0: t_Slice u8) - (input1: t_Slice u8) - (input2: t_Slice u8) - (input3: t_Slice u8) - (out: t_Shake128x4) - -> - true); - f_init_absorb - = - (fun (input0: t_Slice u8) (input1: t_Slice u8) (input2: t_Slice u8) (input3: t_Slice u8) -> - init_absorb input0 input1 input2 input3); - f_squeeze_first_five_blocks_pre - = - (fun - (self: t_Shake128x4) - (out0: t_Array u8 (sz 840)) - (out1: t_Array u8 (sz 840)) - (out2: t_Array u8 (sz 840)) - (out3: t_Array u8 (sz 840)) - -> - true); - f_squeeze_first_five_blocks_post - = - (fun 
- (self: t_Shake128x4) - (out0: t_Array u8 (sz 840)) - (out1: t_Array u8 (sz 840)) - (out2: t_Array u8 (sz 840)) - (out3: t_Array u8 (sz 840)) - (out4: - (t_Shake128x4 & t_Array u8 (sz 840) & t_Array u8 (sz 840) & t_Array u8 (sz 840) & - t_Array u8 (sz 840))) - -> - true); - f_squeeze_first_five_blocks - = - (fun - (self: t_Shake128x4) - (out0: t_Array u8 (sz 840)) - (out1: t_Array u8 (sz 840)) - (out2: t_Array u8 (sz 840)) - (out3: t_Array u8 (sz 840)) - -> - let tmp0, tmp1, tmp2, tmp3, tmp4:(t_Shake128x4 & t_Array u8 (sz 840) & t_Array u8 (sz 840) & - t_Array u8 (sz 840) & - t_Array u8 (sz 840)) = - squeeze_first_five_blocks self out0 out1 out2 out3 - in - let self:t_Shake128x4 = tmp0 in - let out0:t_Array u8 (sz 840) = tmp1 in - let out1:t_Array u8 (sz 840) = tmp2 in - let out2:t_Array u8 (sz 840) = tmp3 in - let out3:t_Array u8 (sz 840) = tmp4 in - let _:Prims.unit = () in - self, out0, out1, out2, out3 - <: - (t_Shake128x4 & t_Array u8 (sz 840) & t_Array u8 (sz 840) & t_Array u8 (sz 840) & - t_Array u8 (sz 840))); - f_squeeze_next_block_pre = (fun (self: t_Shake128x4) -> true); - f_squeeze_next_block_post - = - (fun - (self: t_Shake128x4) - (out5: - (t_Shake128x4 & - (t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168))) - ) - -> - true); - f_squeeze_next_block - = - fun (self: t_Shake128x4) -> - let tmp0, out4:(t_Shake128x4 & - (t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168))) = - squeeze_next_block self - in - let self:t_Shake128x4 = tmp0 in - let hax_temp_output:(t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168) & - t_Array u8 (sz 168)) = - out4 - in - self, hax_temp_output - <: - (t_Shake128x4 & - (t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168))) - } - val squeeze_next_block_shake256 (state: t_Shake256) : Prims.Pure (t_Shake256 & t_Array u8 (sz 136)) Prims.l_True (fun _ -> Prims.l_True) -[@@ FStar.Tactics.Typeclasses.tcinstance] 
-let impl_1: Libcrux_ml_dsa.Hash_functions.Shake256.t_Xof t_Shake256 = - { - f_shake256_pre - = - (fun (v_OUTPUT_LENGTH: usize) (input: t_Slice u8) (out: t_Array u8 v_OUTPUT_LENGTH) -> true); - f_shake256_post - = - (fun - (v_OUTPUT_LENGTH: usize) - (input: t_Slice u8) - (out: t_Array u8 v_OUTPUT_LENGTH) - (out1: t_Array u8 v_OUTPUT_LENGTH) - -> - true); - f_shake256 - = - (fun (v_OUTPUT_LENGTH: usize) (input: t_Slice u8) (out: t_Array u8 v_OUTPUT_LENGTH) -> - let hax_temp_output, out:(Prims.unit & t_Array u8 v_OUTPUT_LENGTH) = - (), shake256 v_OUTPUT_LENGTH input out <: (Prims.unit & t_Array u8 v_OUTPUT_LENGTH) - in - out); - f_init_absorb_pre = (fun (input: t_Slice u8) -> true); - f_init_absorb_post = (fun (input: t_Slice u8) (out: t_Shake256) -> true); - f_init_absorb = (fun (input: t_Slice u8) -> init_absorb_shake256 input); - f_squeeze_first_block_pre = (fun (self: t_Shake256) -> true); - f_squeeze_first_block_post - = - (fun (self: t_Shake256) (out2: (t_Shake256 & t_Array u8 (sz 136))) -> true); - f_squeeze_first_block - = - (fun (self: t_Shake256) -> - let tmp0, out1:(t_Shake256 & t_Array u8 (sz 136)) = squeeze_first_block_shake256 self in - let self:t_Shake256 = tmp0 in - let hax_temp_output:t_Array u8 (sz 136) = out1 in - self, hax_temp_output <: (t_Shake256 & t_Array u8 (sz 136))); - f_squeeze_next_block_pre = (fun (self: t_Shake256) -> true); - f_squeeze_next_block_post - = - (fun (self: t_Shake256) (out2: (t_Shake256 & t_Array u8 (sz 136))) -> true); - f_squeeze_next_block - = - fun (self: t_Shake256) -> - let tmp0, out1:(t_Shake256 & t_Array u8 (sz 136)) = squeeze_next_block_shake256 self in - let self:t_Shake256 = tmp0 in - let hax_temp_output:t_Array u8 (sz 136) = out1 in - self, hax_temp_output <: (t_Shake256 & t_Array u8 (sz 136)) - } - val squeeze_next_block_x4 (state: t_Shake256x4) : Prims.Pure (t_Shake256x4 & (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136))) Prims.l_True (fun _ -> Prims.l_True) - -[@@ 
FStar.Tactics.Typeclasses.tcinstance] -let impl_2: Libcrux_ml_dsa.Hash_functions.Shake256.t_XofX4 t_Shake256x4 = - { - f_init_absorb_x4_pre - = - (fun (input0: t_Slice u8) (input1: t_Slice u8) (input2: t_Slice u8) (input3: t_Slice u8) -> true - ); - f_init_absorb_x4_post - = - (fun - (input0: t_Slice u8) - (input1: t_Slice u8) - (input2: t_Slice u8) - (input3: t_Slice u8) - (out: t_Shake256x4) - -> - true); - f_init_absorb_x4 - = - (fun (input0: t_Slice u8) (input1: t_Slice u8) (input2: t_Slice u8) (input3: t_Slice u8) -> - init_absorb_x4 input0 input1 input2 input3); - f_squeeze_first_block_x4_pre = (fun (self: t_Shake256x4) -> true); - f_squeeze_first_block_x4_post - = - (fun - (self: t_Shake256x4) - (out5: - (t_Shake256x4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136))) - ) - -> - true); - f_squeeze_first_block_x4 - = - (fun (self: t_Shake256x4) -> - let tmp0, out4:(t_Shake256x4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136))) = - squeeze_first_block_x4 self - in - let self:t_Shake256x4 = tmp0 in - let hax_temp_output:(t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & - t_Array u8 (sz 136)) = - out4 - in - self, hax_temp_output - <: - (t_Shake256x4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136)))); - f_squeeze_next_block_x4_pre = (fun (self: t_Shake256x4) -> true); - f_squeeze_next_block_x4_post - = - (fun - (self: t_Shake256x4) - (out5: - (t_Shake256x4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136))) - ) - -> - true); - f_squeeze_next_block_x4 - = - (fun (self: t_Shake256x4) -> - let tmp0, out4:(t_Shake256x4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136))) = - squeeze_next_block_x4 self - in - let self:t_Shake256x4 = tmp0 in - let hax_temp_output:(t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & - t_Array u8 (sz 
136)) = - out4 - in - self, hax_temp_output - <: - (t_Shake256x4 & - (t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136) & t_Array u8 (sz 136)))); - f_shake256_x4_pre - = - (fun - (v_OUT_LEN: usize) - (input0: t_Slice u8) - (input1: t_Slice u8) - (input2: t_Slice u8) - (input3: t_Slice u8) - (out0: t_Array u8 v_OUT_LEN) - (out1: t_Array u8 v_OUT_LEN) - (out2: t_Array u8 v_OUT_LEN) - (out3: t_Array u8 v_OUT_LEN) - -> - true); - f_shake256_x4_post - = - (fun - (v_OUT_LEN: usize) - (input0: t_Slice u8) - (input1: t_Slice u8) - (input2: t_Slice u8) - (input3: t_Slice u8) - (out0: t_Array u8 v_OUT_LEN) - (out1: t_Array u8 v_OUT_LEN) - (out2: t_Array u8 v_OUT_LEN) - (out3: t_Array u8 v_OUT_LEN) - (out4: - (t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN - )) - -> - true); - f_shake256_x4 - = - fun - (v_OUT_LEN: usize) - (input0: t_Slice u8) - (input1: t_Slice u8) - (input2: t_Slice u8) - (input3: t_Slice u8) - (out0: t_Array u8 v_OUT_LEN) - (out1: t_Array u8 v_OUT_LEN) - (out2: t_Array u8 v_OUT_LEN) - (out3: t_Array u8 v_OUT_LEN) - -> - let tmp0, tmp1, tmp2, tmp3:(t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & - t_Array u8 v_OUT_LEN) = - shake256_x4 v_OUT_LEN input0 input1 input2 input3 out0 out1 out2 out3 - in - let out0:t_Array u8 v_OUT_LEN = tmp0 in - let out1:t_Array u8 v_OUT_LEN = tmp1 in - let out2:t_Array u8 v_OUT_LEN = tmp2 in - let out3:t_Array u8 v_OUT_LEN = tmp3 in - let _:Prims.unit = () in - out0, out1, out2, out3 - <: - (t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN & t_Array u8 v_OUT_LEN) - } diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Matrix.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Matrix.fst index 0f4339ffb..2ba6033e2 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Matrix.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Matrix.fst @@ -221,6 +221,17 @@ let compute_As1_plus_s2 
Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) v_ROWS_IN_A in + let s1_ntt:t_Array (Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) v_COLUMNS_IN_A = + Core.Array.impl_23__map #(Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) + v_COLUMNS_IN_A + #(Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) + s1 + (fun s -> + let s:Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit = s in + Libcrux_ml_dsa.Ntt.ntt #v_SIMDUnit s + <: + Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) + in let result:t_Array (Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) v_ROWS_IN_A = Rust_primitives.Hax.Folds.fold_enumerated_slice (v_A_as_ntt <: @@ -268,10 +279,7 @@ let compute_As1_plus_s2 let product:Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit = Libcrux_ml_dsa.Ntt.ntt_multiply_montgomery #v_SIMDUnit ring_element - (Libcrux_ml_dsa.Ntt.ntt #v_SIMDUnit - (s1.[ j ] <: Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) - <: - Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) + (s1_ntt.[ j ] <: Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) in let result:t_Array (Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) v_ROWS_IN_A = diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.Avx2.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.Avx2.fst index e68b8fe9b..57daef3c6 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.Avx2.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.Avx2.fst @@ -14,16 +14,10 @@ let generate_key_pair (randomness: t_Array u8 (sz 32)) = randomness in { - Libcrux_ml_dsa.Types.f_signing_key - = - Libcrux_ml_dsa.Types.MLDSASigningKey signing_key - <: - Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 2560); + Libcrux_ml_dsa.Types.f_signing_key = Libcrux_ml_dsa.Types.impl__new (sz 2560) signing_key; Libcrux_ml_dsa.Types.f_verification_key 
= - Libcrux_ml_dsa.Types.MLDSAVerificationKey verification_key - <: - Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1312) + Libcrux_ml_dsa.Types.impl_2__new (sz 1312) verification_key } <: Libcrux_ml_dsa.Types.t_MLDSAKeyPair (sz 1312) (sz 2560) @@ -35,7 +29,8 @@ let sign = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Avx2.sign (sz 4) (sz 4) (sz 2) (sz 96) (sz 17) 95232l (sz 192) (sz 768) (sz 32) (sz 39) (sz 80) (sz 576) (sz 2560) (sz 2420) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 2560) signing_key <: t_Array u8 (sz 2560)) message + context randomness let sign_pre_hashed_shake128 (signing_key: Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 2560)) @@ -44,7 +39,8 @@ let sign_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Avx2.sign_pre_hashed_shake128 (sz 4) (sz 4) (sz 2) (sz 96) (sz 17) 95232l (sz 192) (sz 768) (sz 32) (sz 39) (sz 80) (sz 576) (sz 2560) (sz 2420) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 2560) signing_key <: t_Array u8 (sz 2560)) message + context randomness let verify (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1312)) @@ -53,7 +49,8 @@ let verify = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Avx2.verify (sz 4) (sz 4) (sz 2420) (sz 1312) (sz 17) (sz 576) 95232l 78l (sz 192) (sz 768) (sz 32) (sz 39) (sz 80) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1312) verification_key <: t_Array u8 (sz 1312)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 2420) signature <: t_Array u8 (sz 2420)) let verify_pre_hashed_shake128 (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1312)) @@ -62,4 +59,5 @@ let verify_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Avx2.verify_pre_hashed_shake128 (sz 4) (sz 4) (sz 2420) (sz 1312) (sz 17) (sz 576) 95232l 78l (sz 192) 
(sz 768) (sz 32) (sz 39) (sz 80) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1312) verification_key <: t_Array u8 (sz 1312)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 2420) signature <: t_Array u8 (sz 2420)) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.Neon.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.Neon.fst index f27fbeff4..881529d16 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.Neon.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.Neon.fst @@ -14,16 +14,10 @@ let generate_key_pair (randomness: t_Array u8 (sz 32)) = randomness in { - Libcrux_ml_dsa.Types.f_signing_key - = - Libcrux_ml_dsa.Types.MLDSASigningKey signing_key - <: - Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 2560); + Libcrux_ml_dsa.Types.f_signing_key = Libcrux_ml_dsa.Types.impl__new (sz 2560) signing_key; Libcrux_ml_dsa.Types.f_verification_key = - Libcrux_ml_dsa.Types.MLDSAVerificationKey verification_key - <: - Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1312) + Libcrux_ml_dsa.Types.impl_2__new (sz 1312) verification_key } <: Libcrux_ml_dsa.Types.t_MLDSAKeyPair (sz 1312) (sz 2560) @@ -35,7 +29,8 @@ let sign = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Neon.sign (sz 4) (sz 4) (sz 2) (sz 96) (sz 17) 95232l (sz 192) (sz 768) (sz 32) (sz 39) (sz 80) (sz 576) (sz 2560) (sz 2420) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 2560) signing_key <: t_Array u8 (sz 2560)) message + context randomness let sign_pre_hashed_shake128 (signing_key: Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 2560)) @@ -44,7 +39,8 @@ let sign_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Neon.sign_pre_hashed_shake128 (sz 4) (sz 4) (sz 2) (sz 96) (sz 17) 95232l (sz 192) (sz 768) (sz 32) (sz 39) (sz 80) (sz 576) (sz 2560) (sz 
2420) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 2560) signing_key <: t_Array u8 (sz 2560)) message + context randomness let verify (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1312)) @@ -53,7 +49,8 @@ let verify = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Neon.verify (sz 4) (sz 4) (sz 2420) (sz 1312) (sz 17) (sz 576) 95232l 78l (sz 192) (sz 768) (sz 32) (sz 39) (sz 80) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1312) verification_key <: t_Array u8 (sz 1312)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 2420) signature <: t_Array u8 (sz 2420)) let verify_pre_hashed_shake128 (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1312)) @@ -62,4 +59,5 @@ let verify_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Neon.verify_pre_hashed_shake128 (sz 4) (sz 4) (sz 2420) (sz 1312) (sz 17) (sz 576) 95232l 78l (sz 192) (sz 768) (sz 32) (sz 39) (sz 80) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1312) verification_key <: t_Array u8 (sz 1312)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 2420) signature <: t_Array u8 (sz 2420)) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.Portable.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.Portable.fst index b28affb1d..47feb8acb 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.Portable.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.Portable.fst @@ -14,16 +14,10 @@ let generate_key_pair (randomness: t_Array u8 (sz 32)) = randomness in { - Libcrux_ml_dsa.Types.f_signing_key - = - Libcrux_ml_dsa.Types.MLDSASigningKey signing_key - <: - Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 2560); + 
Libcrux_ml_dsa.Types.f_signing_key = Libcrux_ml_dsa.Types.impl__new (sz 2560) signing_key; Libcrux_ml_dsa.Types.f_verification_key = - Libcrux_ml_dsa.Types.MLDSAVerificationKey verification_key - <: - Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1312) + Libcrux_ml_dsa.Types.impl_2__new (sz 1312) verification_key } <: Libcrux_ml_dsa.Types.t_MLDSAKeyPair (sz 1312) (sz 2560) @@ -35,7 +29,8 @@ let sign = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Portable.sign (sz 4) (sz 4) (sz 2) (sz 96) (sz 17) 95232l (sz 192) (sz 768) (sz 32) (sz 39) (sz 80) (sz 576) (sz 2560) (sz 2420) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 2560) signing_key <: t_Array u8 (sz 2560)) message + context randomness let sign_pre_hashed_shake128 (signing_key: Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 2560)) @@ -44,7 +39,8 @@ let sign_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Portable.sign_pre_hashed_shake128 (sz 4) (sz 4) (sz 2) (sz 96) (sz 17) 95232l (sz 192) (sz 768) (sz 32) (sz 39) (sz 80) (sz 576) (sz 2560) - (sz 2420) signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (sz 2420) (Libcrux_ml_dsa.Types.impl__as_raw (sz 2560) signing_key <: t_Array u8 (sz 2560)) + message context randomness let verify (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1312)) @@ -53,7 +49,8 @@ let verify = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Portable.verify (sz 4) (sz 4) (sz 2420) (sz 1312) (sz 17) (sz 576) 95232l 78l (sz 192) (sz 768) (sz 32) (sz 39) (sz 80) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1312) verification_key <: t_Array u8 (sz 1312)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 2420) signature <: t_Array u8 (sz 2420)) let verify_pre_hashed_shake128 (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1312)) @@ -62,4 +59,5 @@ let 
verify_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Portable.verify_pre_hashed_shake128 (sz 4) (sz 4) (sz 2420) (sz 1312) (sz 17) (sz 576) 95232l 78l (sz 192) (sz 768) (sz 32) (sz 39) (sz 80) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1312) verification_key <: t_Array u8 (sz 1312)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 2420) signature <: t_Array u8 (sz 2420)) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.fst index 4eff956f5..de9e24809 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_44_.fst @@ -14,16 +14,10 @@ let generate_key_pair (randomness: t_Array u8 (sz 32)) = randomness in { - Libcrux_ml_dsa.Types.f_signing_key - = - Libcrux_ml_dsa.Types.MLDSASigningKey signing_key - <: - Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 2560); + Libcrux_ml_dsa.Types.f_signing_key = Libcrux_ml_dsa.Types.impl__new (sz 2560) signing_key; Libcrux_ml_dsa.Types.f_verification_key = - Libcrux_ml_dsa.Types.MLDSAVerificationKey verification_key - <: - Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1312) + Libcrux_ml_dsa.Types.impl_2__new (sz 1312) verification_key } <: Libcrux_ml_dsa.Types.t_MLDSAKeyPair (sz 1312) (sz 2560) @@ -35,7 +29,8 @@ let sign = Libcrux_ml_dsa.Ml_dsa_generic.Multiplexing.sign (sz 4) (sz 4) (sz 2) (sz 96) (sz 17) 95232l (sz 192) (sz 768) (sz 32) (sz 39) (sz 80) (sz 576) (sz 2560) (sz 2420) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 2560) signing_key <: t_Array u8 (sz 2560)) message + context randomness let sign_pre_hashed_shake128 (signing_key: Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 2560)) @@ -44,7 +39,8 @@ let sign_pre_hashed_shake128 = 
Libcrux_ml_dsa.Ml_dsa_generic.Multiplexing.sign_pre_hashed_shake128 (sz 4) (sz 4) (sz 2) (sz 96) (sz 17) 95232l (sz 192) (sz 768) (sz 32) (sz 39) (sz 80) (sz 576) (sz 2560) (sz 2420) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 2560) signing_key <: t_Array u8 (sz 2560)) message + context randomness let verify (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1312)) @@ -53,7 +49,8 @@ let verify = Libcrux_ml_dsa.Ml_dsa_generic.Multiplexing.verify (sz 4) (sz 4) (sz 2420) (sz 1312) (sz 17) (sz 576) 95232l 78l (sz 192) (sz 768) (sz 32) (sz 39) (sz 80) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1312) verification_key <: t_Array u8 (sz 1312)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 2420) signature <: t_Array u8 (sz 2420)) let verify_pre_hashed_shake128 (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1312)) @@ -62,4 +59,5 @@ let verify_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Multiplexing.verify_pre_hashed_shake128 (sz 4) (sz 4) (sz 2420) (sz 1312) (sz 17) (sz 576) 95232l 78l (sz 192) (sz 768) (sz 32) (sz 39) (sz 80) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1312) verification_key <: t_Array u8 (sz 1312)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 2420) signature <: t_Array u8 (sz 2420)) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.Avx2.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.Avx2.fst index 4dcf80489..93a4a47d2 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.Avx2.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.Avx2.fst @@ -14,16 +14,10 @@ let generate_key_pair (randomness: t_Array u8 (sz 32)) = randomness in { - 
Libcrux_ml_dsa.Types.f_signing_key - = - Libcrux_ml_dsa.Types.MLDSASigningKey signing_key - <: - Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4032); + Libcrux_ml_dsa.Types.f_signing_key = Libcrux_ml_dsa.Types.impl__new (sz 4032) signing_key; Libcrux_ml_dsa.Types.f_verification_key = - Libcrux_ml_dsa.Types.MLDSAVerificationKey verification_key - <: - Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1952) + Libcrux_ml_dsa.Types.impl_2__new (sz 1952) verification_key } <: Libcrux_ml_dsa.Types.t_MLDSAKeyPair (sz 1952) (sz 4032) @@ -35,7 +29,8 @@ let sign = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Avx2.sign (sz 6) (sz 5) (sz 4) (sz 128) (sz 19) 261888l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) (sz 640) (sz 4032) (sz 3309) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 4032) signing_key <: t_Array u8 (sz 4032)) message + context randomness let sign_pre_hashed_shake128 (signing_key: Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4032)) @@ -44,7 +39,8 @@ let sign_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Avx2.sign_pre_hashed_shake128 (sz 6) (sz 5) (sz 4) (sz 128) (sz 19) 261888l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) (sz 640) (sz 4032) (sz 3309) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 4032) signing_key <: t_Array u8 (sz 4032)) message + context randomness let verify (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1952)) @@ -53,7 +49,8 @@ let verify = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Avx2.verify (sz 6) (sz 5) (sz 3309) (sz 1952) (sz 19) (sz 640) 261888l 196l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1952) verification_key <: t_Array u8 (sz 1952)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 3309) signature <: t_Array u8 (sz 3309)) let 
verify_pre_hashed_shake128 (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1952)) @@ -62,4 +59,5 @@ let verify_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Avx2.verify_pre_hashed_shake128 (sz 6) (sz 5) (sz 3309) (sz 1952) (sz 19) (sz 640) 261888l 196l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1952) verification_key <: t_Array u8 (sz 1952)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 3309) signature <: t_Array u8 (sz 3309)) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.Neon.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.Neon.fst index b54a04df2..52cd13c55 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.Neon.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.Neon.fst @@ -14,16 +14,10 @@ let generate_key_pair (randomness: t_Array u8 (sz 32)) = randomness in { - Libcrux_ml_dsa.Types.f_signing_key - = - Libcrux_ml_dsa.Types.MLDSASigningKey signing_key - <: - Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4032); + Libcrux_ml_dsa.Types.f_signing_key = Libcrux_ml_dsa.Types.impl__new (sz 4032) signing_key; Libcrux_ml_dsa.Types.f_verification_key = - Libcrux_ml_dsa.Types.MLDSAVerificationKey verification_key - <: - Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1952) + Libcrux_ml_dsa.Types.impl_2__new (sz 1952) verification_key } <: Libcrux_ml_dsa.Types.t_MLDSAKeyPair (sz 1952) (sz 4032) @@ -35,7 +29,8 @@ let sign = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Neon.sign (sz 6) (sz 5) (sz 4) (sz 128) (sz 19) 261888l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) (sz 640) (sz 4032) (sz 3309) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 4032) signing_key <: t_Array u8 (sz 4032)) message + context randomness let 
sign_pre_hashed_shake128 (signing_key: Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4032)) @@ -44,7 +39,8 @@ let sign_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Neon.sign_pre_hashed_shake128 (sz 6) (sz 5) (sz 4) (sz 128) (sz 19) 261888l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) (sz 640) (sz 4032) (sz 3309) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 4032) signing_key <: t_Array u8 (sz 4032)) message + context randomness let verify (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1952)) @@ -53,7 +49,8 @@ let verify = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Neon.verify (sz 6) (sz 5) (sz 3309) (sz 1952) (sz 19) (sz 640) 261888l 196l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1952) verification_key <: t_Array u8 (sz 1952)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 3309) signature <: t_Array u8 (sz 3309)) let verify_pre_hashed_shake128 (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1952)) @@ -62,4 +59,5 @@ let verify_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Neon.verify_pre_hashed_shake128 (sz 6) (sz 5) (sz 3309) (sz 1952) (sz 19) (sz 640) 261888l 196l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1952) verification_key <: t_Array u8 (sz 1952)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 3309) signature <: t_Array u8 (sz 3309)) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.Portable.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.Portable.fst index eaf1e627f..272c8f309 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.Portable.fst +++ 
b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.Portable.fst @@ -14,16 +14,10 @@ let generate_key_pair (randomness: t_Array u8 (sz 32)) = randomness in { - Libcrux_ml_dsa.Types.f_signing_key - = - Libcrux_ml_dsa.Types.MLDSASigningKey signing_key - <: - Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4032); + Libcrux_ml_dsa.Types.f_signing_key = Libcrux_ml_dsa.Types.impl__new (sz 4032) signing_key; Libcrux_ml_dsa.Types.f_verification_key = - Libcrux_ml_dsa.Types.MLDSAVerificationKey verification_key - <: - Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1952) + Libcrux_ml_dsa.Types.impl_2__new (sz 1952) verification_key } <: Libcrux_ml_dsa.Types.t_MLDSAKeyPair (sz 1952) (sz 4032) @@ -35,7 +29,8 @@ let sign = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Portable.sign (sz 6) (sz 5) (sz 4) (sz 128) (sz 19) 261888l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) (sz 640) (sz 4032) (sz 3309) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 4032) signing_key <: t_Array u8 (sz 4032)) message + context randomness let sign_pre_hashed_shake128 (signing_key: Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4032)) @@ -44,7 +39,8 @@ let sign_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Portable.sign_pre_hashed_shake128 (sz 6) (sz 5) (sz 4) (sz 128) (sz 19) 261888l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) (sz 640) (sz 4032) - (sz 3309) signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (sz 3309) (Libcrux_ml_dsa.Types.impl__as_raw (sz 4032) signing_key <: t_Array u8 (sz 4032)) + message context randomness let verify (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1952)) @@ -53,7 +49,8 @@ let verify = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Portable.verify (sz 6) (sz 5) (sz 3309) (sz 1952) (sz 19) (sz 640) 261888l 196l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + 
(Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1952) verification_key <: t_Array u8 (sz 1952)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 3309) signature <: t_Array u8 (sz 3309)) let verify_pre_hashed_shake128 (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1952)) @@ -62,4 +59,5 @@ let verify_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Portable.verify_pre_hashed_shake128 (sz 6) (sz 5) (sz 3309) (sz 1952) (sz 19) (sz 640) 261888l 196l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1952) verification_key <: t_Array u8 (sz 1952)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 3309) signature <: t_Array u8 (sz 3309)) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.fst index d75500055..47f6598f5 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_65_.fst @@ -14,16 +14,10 @@ let generate_key_pair (randomness: t_Array u8 (sz 32)) = randomness in { - Libcrux_ml_dsa.Types.f_signing_key - = - Libcrux_ml_dsa.Types.MLDSASigningKey signing_key - <: - Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4032); + Libcrux_ml_dsa.Types.f_signing_key = Libcrux_ml_dsa.Types.impl__new (sz 4032) signing_key; Libcrux_ml_dsa.Types.f_verification_key = - Libcrux_ml_dsa.Types.MLDSAVerificationKey verification_key - <: - Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1952) + Libcrux_ml_dsa.Types.impl_2__new (sz 1952) verification_key } <: Libcrux_ml_dsa.Types.t_MLDSAKeyPair (sz 1952) (sz 4032) @@ -35,7 +29,8 @@ let sign = Libcrux_ml_dsa.Ml_dsa_generic.Multiplexing.sign (sz 6) (sz 5) (sz 4) (sz 128) (sz 19) 261888l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) (sz 640) (sz 4032) (sz 3309) - 
signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 4032) signing_key <: t_Array u8 (sz 4032)) message + context randomness let sign_pre_hashed_shake128 (signing_key: Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4032)) @@ -44,7 +39,8 @@ let sign_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Multiplexing.sign_pre_hashed_shake128 (sz 6) (sz 5) (sz 4) (sz 128) (sz 19) 261888l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) (sz 640) (sz 4032) (sz 3309) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 4032) signing_key <: t_Array u8 (sz 4032)) message + context randomness let verify (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1952)) @@ -53,7 +49,8 @@ let verify = Libcrux_ml_dsa.Ml_dsa_generic.Multiplexing.verify (sz 6) (sz 5) (sz 3309) (sz 1952) (sz 19) (sz 640) 261888l 196l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1952) verification_key <: t_Array u8 (sz 1952)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 3309) signature <: t_Array u8 (sz 3309)) let verify_pre_hashed_shake128 (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 1952)) @@ -62,4 +59,5 @@ let verify_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Multiplexing.verify_pre_hashed_shake128 (sz 6) (sz 5) (sz 3309) (sz 1952) (sz 19) (sz 640) 261888l 196l (sz 128) (sz 768) (sz 48) (sz 49) (sz 55) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 1952) verification_key <: t_Array u8 (sz 1952)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 3309) signature <: t_Array u8 (sz 3309)) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.Avx2.fst 
b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.Avx2.fst index 27eb5b514..a5cb7cc82 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.Avx2.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.Avx2.fst @@ -14,16 +14,10 @@ let generate_key_pair (randomness: t_Array u8 (sz 32)) = randomness in { - Libcrux_ml_dsa.Types.f_signing_key - = - Libcrux_ml_dsa.Types.MLDSASigningKey signing_key - <: - Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4896); + Libcrux_ml_dsa.Types.f_signing_key = Libcrux_ml_dsa.Types.impl__new (sz 4896) signing_key; Libcrux_ml_dsa.Types.f_verification_key = - Libcrux_ml_dsa.Types.MLDSAVerificationKey verification_key - <: - Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 2592) + Libcrux_ml_dsa.Types.impl_2__new (sz 2592) verification_key } <: Libcrux_ml_dsa.Types.t_MLDSAKeyPair (sz 2592) (sz 4896) @@ -35,7 +29,8 @@ let sign = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Avx2.sign (sz 8) (sz 7) (sz 2) (sz 96) (sz 19) 261888l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) (sz 640) (sz 4896) (sz 4627) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 4896) signing_key <: t_Array u8 (sz 4896)) message + context randomness let sign_pre_hashed_shake128 (signing_key: Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4896)) @@ -44,7 +39,8 @@ let sign_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Avx2.sign_pre_hashed_shake128 (sz 8) (sz 7) (sz 2) (sz 96) (sz 19) 261888l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) (sz 640) (sz 4896) (sz 4627) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 4896) signing_key <: t_Array u8 (sz 4896)) message + context randomness let verify (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 2592)) @@ -53,7 +49,8 @@ let verify = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Avx2.verify (sz 8) (sz 7) (sz 4627) (sz 2592) (sz 19) 
(sz 640) 261888l 120l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 2592) verification_key <: t_Array u8 (sz 2592)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 4627) signature <: t_Array u8 (sz 4627)) let verify_pre_hashed_shake128 (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 2592)) @@ -62,4 +59,5 @@ let verify_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Avx2.verify_pre_hashed_shake128 (sz 8) (sz 7) (sz 4627) (sz 2592) (sz 19) (sz 640) 261888l 120l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 2592) verification_key <: t_Array u8 (sz 2592)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 4627) signature <: t_Array u8 (sz 4627)) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.Neon.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.Neon.fst index e89d61679..bec5c242e 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.Neon.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.Neon.fst @@ -14,16 +14,10 @@ let generate_key_pair (randomness: t_Array u8 (sz 32)) = randomness in { - Libcrux_ml_dsa.Types.f_signing_key - = - Libcrux_ml_dsa.Types.MLDSASigningKey signing_key - <: - Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4896); + Libcrux_ml_dsa.Types.f_signing_key = Libcrux_ml_dsa.Types.impl__new (sz 4896) signing_key; Libcrux_ml_dsa.Types.f_verification_key = - Libcrux_ml_dsa.Types.MLDSAVerificationKey verification_key - <: - Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 2592) + Libcrux_ml_dsa.Types.impl_2__new (sz 2592) verification_key } <: Libcrux_ml_dsa.Types.t_MLDSAKeyPair (sz 2592) (sz 4896) @@ -35,7 +29,8 @@ let sign = 
Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Neon.sign (sz 8) (sz 7) (sz 2) (sz 96) (sz 19) 261888l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) (sz 640) (sz 4896) (sz 4627) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 4896) signing_key <: t_Array u8 (sz 4896)) message + context randomness let sign_pre_hashed_shake128 (signing_key: Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4896)) @@ -44,7 +39,8 @@ let sign_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Neon.sign_pre_hashed_shake128 (sz 8) (sz 7) (sz 2) (sz 96) (sz 19) 261888l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) (sz 640) (sz 4896) (sz 4627) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 4896) signing_key <: t_Array u8 (sz 4896)) message + context randomness let verify (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 2592)) @@ -53,7 +49,8 @@ let verify = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Neon.verify (sz 8) (sz 7) (sz 4627) (sz 2592) (sz 19) (sz 640) 261888l 120l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 2592) verification_key <: t_Array u8 (sz 2592)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 4627) signature <: t_Array u8 (sz 4627)) let verify_pre_hashed_shake128 (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 2592)) @@ -62,4 +59,5 @@ let verify_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Neon.verify_pre_hashed_shake128 (sz 8) (sz 7) (sz 4627) (sz 2592) (sz 19) (sz 640) 261888l 120l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 2592) verification_key <: t_Array u8 (sz 2592)) message + context 
(Libcrux_ml_dsa.Types.impl_4__as_raw (sz 4627) signature <: t_Array u8 (sz 4627)) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.Portable.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.Portable.fst index 8ff301da4..a5b4a3a2a 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.Portable.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.Portable.fst @@ -14,16 +14,10 @@ let generate_key_pair (randomness: t_Array u8 (sz 32)) = randomness in { - Libcrux_ml_dsa.Types.f_signing_key - = - Libcrux_ml_dsa.Types.MLDSASigningKey signing_key - <: - Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4896); + Libcrux_ml_dsa.Types.f_signing_key = Libcrux_ml_dsa.Types.impl__new (sz 4896) signing_key; Libcrux_ml_dsa.Types.f_verification_key = - Libcrux_ml_dsa.Types.MLDSAVerificationKey verification_key - <: - Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 2592) + Libcrux_ml_dsa.Types.impl_2__new (sz 2592) verification_key } <: Libcrux_ml_dsa.Types.t_MLDSAKeyPair (sz 2592) (sz 4896) @@ -35,7 +29,8 @@ let sign = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Portable.sign (sz 8) (sz 7) (sz 2) (sz 96) (sz 19) 261888l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) (sz 640) (sz 4896) (sz 4627) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 4896) signing_key <: t_Array u8 (sz 4896)) message + context randomness let sign_pre_hashed_shake128 (signing_key: Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4896)) @@ -44,7 +39,8 @@ let sign_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Portable.sign_pre_hashed_shake128 (sz 8) (sz 7) (sz 2) (sz 96) (sz 19) 261888l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) (sz 640) (sz 4896) - (sz 4627) signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (sz 4627) (Libcrux_ml_dsa.Types.impl__as_raw (sz 4896) signing_key <: t_Array u8 (sz 4896)) + message context randomness let verify 
(verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 2592)) @@ -53,7 +49,8 @@ let verify = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Portable.verify (sz 8) (sz 7) (sz 4627) (sz 2592) (sz 19) (sz 640) 261888l 120l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 2592) verification_key <: t_Array u8 (sz 2592)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 4627) signature <: t_Array u8 (sz 4627)) let verify_pre_hashed_shake128 (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 2592)) @@ -62,4 +59,5 @@ let verify_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Instantiations.Portable.verify_pre_hashed_shake128 (sz 8) (sz 7) (sz 4627) (sz 2592) (sz 19) (sz 640) 261888l 120l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 2592) verification_key <: t_Array u8 (sz 2592)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 4627) signature <: t_Array u8 (sz 4627)) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.fst index 7628dbe10..b7bfad8f1 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_87_.fst @@ -14,16 +14,10 @@ let generate_key_pair (randomness: t_Array u8 (sz 32)) = randomness in { - Libcrux_ml_dsa.Types.f_signing_key - = - Libcrux_ml_dsa.Types.MLDSASigningKey signing_key - <: - Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4896); + Libcrux_ml_dsa.Types.f_signing_key = Libcrux_ml_dsa.Types.impl__new (sz 4896) signing_key; Libcrux_ml_dsa.Types.f_verification_key = - Libcrux_ml_dsa.Types.MLDSAVerificationKey verification_key - <: - 
Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 2592) + Libcrux_ml_dsa.Types.impl_2__new (sz 2592) verification_key } <: Libcrux_ml_dsa.Types.t_MLDSAKeyPair (sz 2592) (sz 4896) @@ -35,7 +29,8 @@ let sign = Libcrux_ml_dsa.Ml_dsa_generic.Multiplexing.sign (sz 8) (sz 7) (sz 2) (sz 96) (sz 19) 261888l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) (sz 640) (sz 4896) (sz 4627) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 4896) signing_key <: t_Array u8 (sz 4896)) message + context randomness let sign_pre_hashed_shake128 (signing_key: Libcrux_ml_dsa.Types.t_MLDSASigningKey (sz 4896)) @@ -44,7 +39,8 @@ let sign_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Multiplexing.sign_pre_hashed_shake128 (sz 8) (sz 7) (sz 2) (sz 96) (sz 19) 261888l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) (sz 640) (sz 4896) (sz 4627) - signing_key.Libcrux_ml_dsa.Types._0 message context randomness + (Libcrux_ml_dsa.Types.impl__as_raw (sz 4896) signing_key <: t_Array u8 (sz 4896)) message + context randomness let verify (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 2592)) @@ -53,7 +49,8 @@ let verify = Libcrux_ml_dsa.Ml_dsa_generic.Multiplexing.verify (sz 8) (sz 7) (sz 4627) (sz 2592) (sz 19) (sz 640) 261888l 120l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) - verification_key.Libcrux_ml_dsa.Types._0 message context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 2592) verification_key <: t_Array u8 (sz 2592)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 4627) signature <: t_Array u8 (sz 4627)) let verify_pre_hashed_shake128 (verification_key: Libcrux_ml_dsa.Types.t_MLDSAVerificationKey (sz 2592)) @@ -62,4 +59,5 @@ let verify_pre_hashed_shake128 = Libcrux_ml_dsa.Ml_dsa_generic.Multiplexing.verify_pre_hashed_shake128 (sz 8) (sz 7) (sz 4627) (sz 2592) (sz 19) (sz 640) 261888l 120l (sz 128) (sz 1024) (sz 64) (sz 60) (sz 75) - verification_key.Libcrux_ml_dsa.Types._0 message 
context signature.Libcrux_ml_dsa.Types._0 + (Libcrux_ml_dsa.Types.impl_2__as_raw (sz 2592) verification_key <: t_Array u8 (sz 2592)) message + context (Libcrux_ml_dsa.Types.impl_4__as_raw (sz 4627) signature <: t_Array u8 (sz 4627)) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_generic.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_generic.fst index 878dd2cb5..0af8aebcb 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_generic.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Ml_dsa_generic.fst @@ -131,7 +131,6 @@ let sign_internal (t_Array (Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) v_COLUMNS_IN_A) v_ROWS_IN_A = Libcrux_ml_dsa.Samplex4.matrix_A #v_SIMDUnit - #v_Shake128X4 v_ROWS_IN_A v_COLUMNS_IN_A (Libcrux_ml_dsa.Utils.into_padded_array (sz 34) (seed_for_A <: t_Slice u8) @@ -474,20 +473,17 @@ let sign_internal v_MAX_ONES_IN_HINT v_SIGNATURE_SIZE ({ - Libcrux_ml_dsa.Types.f_commitment_hash = commitment_hash; - Libcrux_ml_dsa.Types.f_signer_response = signer_response; - Libcrux_ml_dsa.Types.f_hint = hint + Libcrux_ml_dsa.Encoding.Signature.f_commitment_hash = commitment_hash; + Libcrux_ml_dsa.Encoding.Signature.f_signer_response = signer_response; + Libcrux_ml_dsa.Encoding.Signature.f_hint = hint } <: - Libcrux_ml_dsa.Types.t_Signature v_SIMDUnit + Libcrux_ml_dsa.Encoding.Signature.t_Signature v_SIMDUnit v_COMMITMENT_HASH_SIZE v_COLUMNS_IN_A v_ROWS_IN_A) in - Core.Result.Result_Ok - (Libcrux_ml_dsa.Types.MLDSASignature signature - <: - Libcrux_ml_dsa.Types.t_MLDSASignature v_SIGNATURE_SIZE) + Core.Result.Result_Ok (Libcrux_ml_dsa.Types.impl_4__new v_SIGNATURE_SIZE signature) <: Core.Result.t_Result (Libcrux_ml_dsa.Types.t_MLDSASignature v_SIGNATURE_SIZE) Libcrux_ml_dsa.Types.t_SigningError @@ -661,7 +657,7 @@ let verify_internal if ~.(Libcrux_ml_dsa.Arithmetic.vector_infinity_norm_exceeds #v_SIMDUnit v_COLUMNS_IN_A - 
signature.Libcrux_ml_dsa.Types.f_signer_response + signature.Libcrux_ml_dsa.Encoding.Signature.f_signer_response ((2l <. commitment_hash + if signature.Libcrux_ml_dsa.Encoding.Signature.f_commitment_hash <>. commitment_hash then Core.Result.Result_Err (Libcrux_ml_dsa.Types.VerificationError_CommitmentHashesDontMatchError @@ -920,7 +915,6 @@ let generate_key_pair (t_Array (Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) v_COLUMNS_IN_A) v_ROWS_IN_A = Libcrux_ml_dsa.Samplex4.matrix_A #v_SIMDUnit - #v_Shake128X4 v_ROWS_IN_A v_COLUMNS_IN_A (Libcrux_ml_dsa.Utils.into_padded_array (sz 34) seed_for_a <: t_Array u8 (sz 34)) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Polynomial.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Polynomial.fst index d92cb4d77..92db55cce 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Polynomial.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Polynomial.fst @@ -9,36 +9,6 @@ let _ = let open Libcrux_ml_dsa.Simd.Traits in () -let impl__infinity_norm_exceeds - (#v_SIMDUnit: Type0) - (#[FStar.Tactics.Typeclasses.tcresolve ()] - i2: - Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit) - (self: t_PolynomialRingElement v_SIMDUnit) - (bound: i32) - = - let exceeds:bool = false in - let exceeds:bool = - Core.Iter.Traits.Iterator.f_fold (Core.Iter.Traits.Collect.f_into_iter #(t_Array v_SIMDUnit - (sz 32)) - #FStar.Tactics.Typeclasses.solve - self.f_simd_units - <: - Core.Array.Iter.t_IntoIter v_SIMDUnit (sz 32)) - exceeds - (fun exceeds simd_unit -> - let exceeds:bool = exceeds in - let simd_unit:v_SIMDUnit = simd_unit in - exceeds || - (Libcrux_ml_dsa.Simd.Traits.f_infinity_norm_exceeds #v_SIMDUnit - #FStar.Tactics.Typeclasses.solve - simd_unit - bound - <: - bool)) - in - exceeds - let impl__ZERO (#v_SIMDUnit: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] @@ -160,6 +130,36 @@ let impl__add in sum +let impl__infinity_norm_exceeds + (#v_SIMDUnit: Type0) + 
(#[FStar.Tactics.Typeclasses.tcresolve ()] + i2: + Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit) + (self: t_PolynomialRingElement v_SIMDUnit) + (bound: i32) + = + let exceeds:bool = false in + let exceeds:bool = + Rust_primitives.Hax.Folds.fold_range (sz 0) + (Core.Slice.impl__len #v_SIMDUnit (self.f_simd_units <: t_Slice v_SIMDUnit) <: usize) + (fun exceeds temp_1_ -> + let exceeds:bool = exceeds in + let _:usize = temp_1_ in + true) + exceeds + (fun exceeds i -> + let exceeds:bool = exceeds in + let i:usize = i in + exceeds || + (Libcrux_ml_dsa.Simd.Traits.f_infinity_norm_exceeds #v_SIMDUnit + #FStar.Tactics.Typeclasses.solve + (self.f_simd_units.[ i ] <: v_SIMDUnit) + bound + <: + bool)) + in + exceeds + let impl__subtract (#v_SIMDUnit: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Polynomial.fsti b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Polynomial.fsti index 918eb2620..6f7a5837e 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Polynomial.fsti +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Polynomial.fsti @@ -13,13 +13,6 @@ type t_PolynomialRingElement (v_SIMDUnit: Type0) {| i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} = { f_simd_units:t_Array v_SIMDUnit (sz 32) } -val impl__infinity_norm_exceeds - (#v_SIMDUnit: Type0) - {| i2: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} - (self: t_PolynomialRingElement v_SIMDUnit) - (bound: i32) - : Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True) - val impl__ZERO: #v_SIMDUnit: Type0 -> {| i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} -> @@ -38,6 +31,13 @@ val impl__add (self rhs: t_PolynomialRingElement v_SIMDUnit) : Prims.Pure (t_PolynomialRingElement v_SIMDUnit) Prims.l_True (fun _ -> Prims.l_True) +val impl__infinity_norm_exceeds + (#v_SIMDUnit: Type0) + {| i2: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} + (self: t_PolynomialRingElement v_SIMDUnit) + 
(bound: i32) + : Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True) + val impl__subtract (#v_SIMDUnit: Type0) {| i2: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Pre_hash.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Pre_hash.fst index c8f3084d4..839ac9c79 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Pre_hash.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Pre_hash.fst @@ -28,3 +28,62 @@ let impl_1__new (context: t_Slice u8) (pre_hash_oid: Core.Option.t_Option (t_Arr let t_DomainSeparationError_cast_to_repr (x: t_DomainSeparationError) = match x with | DomainSeparationError_ContextTooLongError -> isz 0 + +[@@ FStar.Tactics.Typeclasses.tcinstance] +let impl_2: Core.Convert.t_From Libcrux_ml_dsa.Types.t_SigningError t_DomainSeparationError = + { + f_from_pre = (fun (e: t_DomainSeparationError) -> true); + f_from_post + = + (fun (e: t_DomainSeparationError) (out: Libcrux_ml_dsa.Types.t_SigningError) -> true); + f_from + = + fun (e: t_DomainSeparationError) -> + match e with + | DomainSeparationError_ContextTooLongError -> + Libcrux_ml_dsa.Types.SigningError_ContextTooLongError <: Libcrux_ml_dsa.Types.t_SigningError + } + +[@@ FStar.Tactics.Typeclasses.tcinstance] +let impl_3: Core.Convert.t_From Libcrux_ml_dsa.Types.t_VerificationError t_DomainSeparationError = + { + f_from_pre = (fun (e: t_DomainSeparationError) -> true); + f_from_post + = + (fun (e: t_DomainSeparationError) (out: Libcrux_ml_dsa.Types.t_VerificationError) -> true); + f_from + = + fun (e: t_DomainSeparationError) -> + match e with + | DomainSeparationError_ContextTooLongError -> + Libcrux_ml_dsa.Types.VerificationError_ContextTooLongError + <: + Libcrux_ml_dsa.Types.t_VerificationError + } + +[@@ FStar.Tactics.Typeclasses.tcinstance] +let impl: t_PreHash t_SHAKE128_PH (sz 256) = + { + f_oid_pre = (fun (_: Prims.unit) -> true); + f_oid_post = (fun (_: Prims.unit) (out: 
t_Array u8 (sz 11)) -> true); + f_oid + = + (fun (_: Prims.unit) -> + let list = [6uy; 9uy; 96uy; 134uy; 72uy; 1uy; 101uy; 3uy; 4uy; 2uy; 11uy] in + FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 11); + Rust_primitives.Hax.array_of_list 11 list); + f_hash_pre = (fun (message: t_Slice u8) -> true); + f_hash_post = (fun (message: t_Slice u8) (out: t_Array u8 (sz 256)) -> true); + f_hash + = + fun (message: t_Slice u8) -> + let output:t_Array u8 (sz 256) = Rust_primitives.Hax.repeat 0uy (sz 256) in + let output:t_Array u8 (sz 256) = + Libcrux_ml_dsa.Hash_functions.Shake128.f_shake128 #Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128 + #FStar.Tactics.Typeclasses.solve + (sz 256) + message + output + in + output + } diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Pre_hash.fsti b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Pre_hash.fsti index 2e097f642..2dc40559b 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Pre_hash.fsti +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Pre_hash.fsti @@ -54,60 +54,10 @@ type t_SHAKE128_PH = | SHAKE128_PH : t_SHAKE128_PH let v_PRE_HASH_OID_LEN: usize = sz 11 [@@ FStar.Tactics.Typeclasses.tcinstance] -let impl_2: Core.Convert.t_From Libcrux_ml_dsa.Types.t_SigningError t_DomainSeparationError = - { - f_from_pre = (fun (e: t_DomainSeparationError) -> true); - f_from_post - = - (fun (e: t_DomainSeparationError) (out: Libcrux_ml_dsa.Types.t_SigningError) -> true); - f_from - = - fun (e: t_DomainSeparationError) -> - match e with - | DomainSeparationError_ContextTooLongError -> - Libcrux_ml_dsa.Types.SigningError_ContextTooLongError <: Libcrux_ml_dsa.Types.t_SigningError - } +val impl_2:Core.Convert.t_From Libcrux_ml_dsa.Types.t_SigningError t_DomainSeparationError [@@ FStar.Tactics.Typeclasses.tcinstance] -let impl_3: Core.Convert.t_From Libcrux_ml_dsa.Types.t_VerificationError t_DomainSeparationError = - { - f_from_pre = (fun (e: t_DomainSeparationError) -> true); - 
f_from_post - = - (fun (e: t_DomainSeparationError) (out: Libcrux_ml_dsa.Types.t_VerificationError) -> true); - f_from - = - fun (e: t_DomainSeparationError) -> - match e with - | DomainSeparationError_ContextTooLongError -> - Libcrux_ml_dsa.Types.VerificationError_ContextTooLongError - <: - Libcrux_ml_dsa.Types.t_VerificationError - } +val impl_3:Core.Convert.t_From Libcrux_ml_dsa.Types.t_VerificationError t_DomainSeparationError [@@ FStar.Tactics.Typeclasses.tcinstance] -let impl: t_PreHash t_SHAKE128_PH (sz 256) = - { - f_oid_pre = (fun (_: Prims.unit) -> true); - f_oid_post = (fun (_: Prims.unit) (out: t_Array u8 (sz 11)) -> true); - f_oid - = - (fun (_: Prims.unit) -> - let list = [6uy; 9uy; 96uy; 134uy; 72uy; 1uy; 101uy; 3uy; 4uy; 2uy; 11uy] in - FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 11); - Rust_primitives.Hax.array_of_list 11 list); - f_hash_pre = (fun (message: t_Slice u8) -> true); - f_hash_post = (fun (message: t_Slice u8) (out: t_Array u8 (sz 256)) -> true); - f_hash - = - fun (message: t_Slice u8) -> - let output:t_Array u8 (sz 256) = Rust_primitives.Hax.repeat 0uy (sz 256) in - let output:t_Array u8 (sz 256) = - Libcrux_ml_dsa.Hash_functions.Shake128.f_shake128 #Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128 - #FStar.Tactics.Typeclasses.solve - (sz 256) - message - output - in - output - } +val impl:t_PreHash t_SHAKE128_PH (sz 256) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Sample.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Sample.fst index f2d7ff6c7..2c3c28d2f 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Sample.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Sample.fst @@ -6,6 +6,7 @@ open FStar.Mul let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. 
*) + let open Libcrux_ml_dsa.Hash_functions.Portable in let open Libcrux_ml_dsa.Hash_functions.Shake128 in let open Libcrux_ml_dsa.Hash_functions.Shake256 in let open Libcrux_ml_dsa.Simd.Traits in @@ -681,13 +682,10 @@ let sample_four_error_ring_elements Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) let sample_four_ring_elements - (#v_SIMDUnit #v_Shake128: Type0) + (#v_SIMDUnit: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] - i2: + i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit) - (#[FStar.Tactics.Typeclasses.tcresolve ()] - i3: - Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 v_Shake128) (seed0: t_Array u8 (sz 34)) (domain_separator0 domain_separator1 domain_seperator2 domain_separator3: u16) = @@ -734,8 +732,8 @@ let sample_four_ring_elements (sz 33) (cast (domain_separator3 >>! 8l <: u16) <: u8) in - let state:v_Shake128 = - Libcrux_ml_dsa.Hash_functions.Shake128.f_init_absorb #v_Shake128 + let state:Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128X4 = + Libcrux_ml_dsa.Hash_functions.Shake128.f_init_absorb #Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128X4 #FStar.Tactics.Typeclasses.solve (seed0 <: t_Slice u8) (seed1 <: t_Slice u8) @@ -746,10 +744,12 @@ let sample_four_ring_elements let randomness1:t_Array u8 (sz 840) = Rust_primitives.Hax.repeat 0uy (sz 840) in let randomness2:t_Array u8 (sz 840) = Rust_primitives.Hax.repeat 0uy (sz 840) in let randomness3:t_Array u8 (sz 840) = Rust_primitives.Hax.repeat 0uy (sz 840) in - let tmp0, tmp1, tmp2, tmp3, tmp4:(v_Shake128 & t_Array u8 (sz 840) & t_Array u8 (sz 840) & + let tmp0, tmp1, tmp2, tmp3, tmp4:(Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128X4 & + t_Array u8 (sz 840) & + t_Array u8 (sz 840) & t_Array u8 (sz 840) & t_Array u8 (sz 840)) = - Libcrux_ml_dsa.Hash_functions.Shake128.f_squeeze_first_five_blocks #v_Shake128 + Libcrux_ml_dsa.Hash_functions.Shake128.f_squeeze_first_five_blocks #Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128X4 #FStar.Tactics.Typeclasses.solve state 
randomness0 @@ -757,7 +757,7 @@ let sample_four_ring_elements randomness2 randomness3 in - let state:v_Shake128 = tmp0 in + let state:Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128X4 = tmp0 in let randomness0:t_Array u8 (sz 840) = tmp1 in let randomness1:t_Array u8 (sz 840) = tmp2 in let randomness2:t_Array u8 (sz 840) = tmp3 in @@ -829,7 +829,7 @@ let sample_four_ring_elements usize & usize & usize & - v_Shake128) = + Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128X4) = Rust_primitives.f_while_loop (fun temp_0_ -> let coefficients0, @@ -854,7 +854,7 @@ let sample_four_ring_elements usize & usize & usize & - v_Shake128) = + Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128X4) = temp_0_ in (~.done0 <: bool) || (~.done1 <: bool) || (~.done2 <: bool) || (~.done3 <: bool)) @@ -881,7 +881,7 @@ let sample_four_ring_elements usize & usize & usize & - v_Shake128)) + Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128X4)) (fun temp_0_ -> let coefficients0, @@ -906,17 +906,17 @@ let sample_four_ring_elements usize & usize & usize & - v_Shake128) = + Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128X4) = temp_0_ in - let tmp0, out:(v_Shake128 & + let tmp0, out:(Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128X4 & (t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168))) = - Libcrux_ml_dsa.Hash_functions.Shake128.f_squeeze_next_block #v_Shake128 + Libcrux_ml_dsa.Hash_functions.Shake128.f_squeeze_next_block #Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128X4 #FStar.Tactics.Typeclasses.solve state in - let state:v_Shake128 = tmp0 in + let state:Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128X4 = tmp0 in let randomnesses:(t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168) & t_Array u8 (sz 168)) = out @@ -1001,7 +1001,7 @@ let sample_four_ring_elements usize & usize & usize & - v_Shake128) + Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128X4) else coefficients0, coefficients1, @@ -1027,7 +1027,7 @@ let sample_four_ring_elements 
usize & usize & usize & - v_Shake128)) + Libcrux_ml_dsa.Hash_functions.Portable.t_Shake128X4)) in Libcrux_ml_dsa.Polynomial.impl__from_i32_array #v_SIMDUnit (coefficients0 <: t_Slice i32), Libcrux_ml_dsa.Polynomial.impl__from_i32_array #v_SIMDUnit (coefficients1 <: t_Slice i32), diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Sample.fsti b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Sample.fsti index a742ab51f..6f4c4d09d 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Sample.fsti +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Sample.fsti @@ -6,6 +6,7 @@ open FStar.Mul let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. *) + let open Libcrux_ml_dsa.Hash_functions.Portable in let open Libcrux_ml_dsa.Hash_functions.Shake128 in let open Libcrux_ml_dsa.Hash_functions.Shake256 in let open Libcrux_ml_dsa.Simd.Traits in @@ -80,9 +81,8 @@ val sample_four_error_ring_elements (fun _ -> Prims.l_True) val sample_four_ring_elements - (#v_SIMDUnit #v_Shake128: Type0) - {| i2: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} - {| i3: Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 v_Shake128 |} + (#v_SIMDUnit: Type0) + {| i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} (seed0: t_Array u8 (sz 34)) (domain_separator0 domain_separator1 domain_seperator2 domain_separator3: u16) : Prims.Pure diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Samplex4.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Samplex4.fst index ac648b477..06a86b638 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Samplex4.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Samplex4.fst @@ -6,7 +6,6 @@ open FStar.Mul let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. 
*) - let open Libcrux_ml_dsa.Hash_functions.Shake128 in let open Libcrux_ml_dsa.Hash_functions.Shake256 in let open Libcrux_ml_dsa.Simd.Traits in () @@ -43,14 +42,11 @@ let update_matrix m let matrix_A_4_by_4_ - (#v_SIMDUnit #v_Shake128X4: Type0) + (#v_SIMDUnit: Type0) (v_ROWS_IN_A v_COLUMNS_IN_A: usize) (#[FStar.Tactics.Typeclasses.tcresolve ()] - i2: + i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit) - (#[FStar.Tactics.Typeclasses.tcresolve ()] - i3: - Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 v_Shake128X4) (seed: t_Array u8 (sz 34)) = let @@ -73,7 +69,6 @@ let matrix_A_4_by_4_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 0uy 0uy <: u16) (generate_domain_separator 0uy 1uy <: u16) @@ -105,7 +100,6 @@ let matrix_A_4_by_4_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 1uy 0uy <: u16) (generate_domain_separator 1uy 1uy <: u16) @@ -137,7 +131,6 @@ let matrix_A_4_by_4_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 2uy 0uy <: u16) (generate_domain_separator 2uy 1uy <: u16) @@ -169,7 +162,6 @@ let matrix_A_4_by_4_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 3uy 0uy <: u16) (generate_domain_separator 3uy 1uy <: u16) @@ -199,14 +191,11 @@ let matrix_A_4_by_4_ v_A let matrix_A_6_by_5_ - (#v_SIMDUnit #v_Shake128X4: 
Type0) + (#v_SIMDUnit: Type0) (v_ROWS_IN_A v_COLUMNS_IN_A: usize) (#[FStar.Tactics.Typeclasses.tcresolve ()] - i2: + i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit) - (#[FStar.Tactics.Typeclasses.tcresolve ()] - i3: - Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 v_Shake128X4) (seed: t_Array u8 (sz 34)) = let v_A:t_Array @@ -226,7 +215,6 @@ let matrix_A_6_by_5_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 0uy 0uy <: u16) (generate_domain_separator 0uy 1uy <: u16) @@ -258,7 +246,6 @@ let matrix_A_6_by_5_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 0uy 4uy <: u16) (generate_domain_separator 1uy 0uy <: u16) @@ -290,7 +277,6 @@ let matrix_A_6_by_5_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 1uy 3uy <: u16) (generate_domain_separator 1uy 4uy <: u16) @@ -322,7 +308,6 @@ let matrix_A_6_by_5_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 2uy 2uy <: u16) (generate_domain_separator 2uy 3uy <: u16) @@ -354,7 +339,6 @@ let matrix_A_6_by_5_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 3uy 1uy <: u16) (generate_domain_separator 3uy 2uy 
<: u16) @@ -386,7 +370,6 @@ let matrix_A_6_by_5_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 4uy 0uy <: u16) (generate_domain_separator 4uy 1uy <: u16) @@ -418,7 +401,6 @@ let matrix_A_6_by_5_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 4uy 4uy <: u16) (generate_domain_separator 5uy 0uy <: u16) @@ -450,7 +432,6 @@ let matrix_A_6_by_5_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 5uy 3uy <: u16) (generate_domain_separator 5uy 4uy <: u16) @@ -470,14 +451,11 @@ let matrix_A_6_by_5_ v_A let matrix_A_8_by_7_ - (#v_SIMDUnit #v_Shake128X4: Type0) + (#v_SIMDUnit: Type0) (v_ROWS_IN_A v_COLUMNS_IN_A: usize) (#[FStar.Tactics.Typeclasses.tcresolve ()] - i2: + i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit) - (#[FStar.Tactics.Typeclasses.tcresolve ()] - i3: - Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 v_Shake128X4) (seed: t_Array u8 (sz 34)) = let v_A:t_Array @@ -497,7 +475,6 @@ let matrix_A_8_by_7_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 0uy 0uy <: u16) (generate_domain_separator 0uy 1uy <: u16) @@ -529,7 +506,6 @@ let matrix_A_8_by_7_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements 
#v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 0uy 4uy <: u16) (generate_domain_separator 0uy 5uy <: u16) @@ -561,7 +537,6 @@ let matrix_A_8_by_7_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 1uy 1uy <: u16) (generate_domain_separator 1uy 2uy <: u16) @@ -593,7 +568,6 @@ let matrix_A_8_by_7_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 1uy 5uy <: u16) (generate_domain_separator 1uy 6uy <: u16) @@ -625,7 +599,6 @@ let matrix_A_8_by_7_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 2uy 2uy <: u16) (generate_domain_separator 2uy 3uy <: u16) @@ -657,7 +630,6 @@ let matrix_A_8_by_7_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 2uy 6uy <: u16) (generate_domain_separator 3uy 0uy <: u16) @@ -689,7 +661,6 @@ let matrix_A_8_by_7_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 3uy 3uy <: u16) (generate_domain_separator 3uy 4uy <: u16) @@ -721,7 +692,6 @@ let matrix_A_8_by_7_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = 
Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 4uy 0uy <: u16) (generate_domain_separator 4uy 1uy <: u16) @@ -753,7 +723,6 @@ let matrix_A_8_by_7_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 4uy 4uy <: u16) (generate_domain_separator 4uy 5uy <: u16) @@ -785,7 +754,6 @@ let matrix_A_8_by_7_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 5uy 1uy <: u16) (generate_domain_separator 5uy 2uy <: u16) @@ -817,7 +785,6 @@ let matrix_A_8_by_7_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 5uy 5uy <: u16) (generate_domain_separator 5uy 6uy <: u16) @@ -849,7 +816,6 @@ let matrix_A_8_by_7_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 6uy 2uy <: u16) (generate_domain_separator 6uy 3uy <: u16) @@ -881,7 +847,6 @@ let matrix_A_8_by_7_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 6uy 6uy <: u16) (generate_domain_separator 7uy 0uy <: u16) @@ -913,7 +878,6 @@ let matrix_A_8_by_7_ Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit & Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement 
v_SIMDUnit) = Libcrux_ml_dsa.Sample.sample_four_ring_elements #v_SIMDUnit - #v_Shake128X4 seed (generate_domain_separator 7uy 3uy <: u16) (generate_domain_separator 7uy 4uy <: u16) @@ -943,22 +907,19 @@ let matrix_A_8_by_7_ v_A let matrix_A - (#v_SIMDUnit #v_Shake128X4: Type0) + (#v_SIMDUnit: Type0) (v_ROWS_IN_A v_COLUMNS_IN_A: usize) (#[FStar.Tactics.Typeclasses.tcresolve ()] - i2: + i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit) - (#[FStar.Tactics.Typeclasses.tcresolve ()] - i3: - Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 v_Shake128X4) (seed: t_Array u8 (sz 34)) = match (cast (v_ROWS_IN_A <: usize) <: u8), (cast (v_COLUMNS_IN_A <: usize) <: u8) <: (u8 & u8) with - | 4uy, 4uy -> matrix_A_4_by_4_ #v_SIMDUnit #v_Shake128X4 v_ROWS_IN_A v_COLUMNS_IN_A seed - | 6uy, 5uy -> matrix_A_6_by_5_ #v_SIMDUnit #v_Shake128X4 v_ROWS_IN_A v_COLUMNS_IN_A seed - | 8uy, 7uy -> matrix_A_8_by_7_ #v_SIMDUnit #v_Shake128X4 v_ROWS_IN_A v_COLUMNS_IN_A seed + | 4uy, 4uy -> matrix_A_4_by_4_ #v_SIMDUnit v_ROWS_IN_A v_COLUMNS_IN_A seed + | 6uy, 5uy -> matrix_A_6_by_5_ #v_SIMDUnit v_ROWS_IN_A v_COLUMNS_IN_A seed + | 8uy, 7uy -> matrix_A_8_by_7_ #v_SIMDUnit v_ROWS_IN_A v_COLUMNS_IN_A seed | _ -> Rust_primitives.Hax.never_to_any (Core.Panicking.panic "internal error: entered unreachable code" diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Samplex4.fsti b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Samplex4.fsti index a914aec27..e1b9a56dc 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Samplex4.fsti +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Samplex4.fsti @@ -6,7 +6,6 @@ open FStar.Mul let _ = (* This module has implicit dependencies, here we make them explicit. *) (* The implicit dependencies arise from typeclasses instances. 
*) - let open Libcrux_ml_dsa.Hash_functions.Shake128 in let open Libcrux_ml_dsa.Hash_functions.Shake256 in let open Libcrux_ml_dsa.Simd.Traits in () @@ -29,10 +28,9 @@ val update_matrix v_ROWS_IN_A) Prims.l_True (fun _ -> Prims.l_True) val matrix_A_4_by_4_ - (#v_SIMDUnit #v_Shake128X4: Type0) + (#v_SIMDUnit: Type0) (v_ROWS_IN_A v_COLUMNS_IN_A: usize) - {| i2: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} - {| i3: Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 v_Shake128X4 |} + {| i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} (seed: t_Array u8 (sz 34)) : Prims.Pure (t_Array @@ -40,10 +38,9 @@ val matrix_A_4_by_4_ v_ROWS_IN_A) Prims.l_True (fun _ -> Prims.l_True) val matrix_A_6_by_5_ - (#v_SIMDUnit #v_Shake128X4: Type0) + (#v_SIMDUnit: Type0) (v_ROWS_IN_A v_COLUMNS_IN_A: usize) - {| i2: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} - {| i3: Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 v_Shake128X4 |} + {| i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} (seed: t_Array u8 (sz 34)) : Prims.Pure (t_Array @@ -51,10 +48,9 @@ val matrix_A_6_by_5_ v_ROWS_IN_A) Prims.l_True (fun _ -> Prims.l_True) val matrix_A_8_by_7_ - (#v_SIMDUnit #v_Shake128X4: Type0) + (#v_SIMDUnit: Type0) (v_ROWS_IN_A v_COLUMNS_IN_A: usize) - {| i2: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} - {| i3: Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 v_Shake128X4 |} + {| i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} (seed: t_Array u8 (sz 34)) : Prims.Pure (t_Array @@ -62,10 +58,9 @@ val matrix_A_8_by_7_ v_ROWS_IN_A) Prims.l_True (fun _ -> Prims.l_True) val matrix_A - (#v_SIMDUnit #v_Shake128X4: Type0) + (#v_SIMDUnit: Type0) (v_ROWS_IN_A v_COLUMNS_IN_A: usize) - {| i2: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} - {| i3: Libcrux_ml_dsa.Hash_functions.Shake128.t_XofX4 v_Shake128X4 |} + {| i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} (seed: t_Array u8 (sz 34)) : Prims.Pure (t_Array diff --git 
a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Avx2.Vector_type.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Avx2.Vector_type.fst index e220b31db..8dc299c31 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Avx2.Vector_type.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Avx2.Vector_type.fst @@ -3,6 +3,14 @@ module Libcrux_ml_dsa.Simd.Avx2.Vector_type open Core open FStar.Mul +[@@ FStar.Tactics.Typeclasses.tcinstance] +let impl: Core.Convert.t_From t_AVX2SIMDUnit u8 = + { + f_from_pre = (fun (coefficients: u8) -> true); + f_from_post = (fun (coefficients: u8) (out: t_AVX2SIMDUnit) -> true); + f_from = fun (coefficients: u8) -> { f_coefficients = coefficients } <: t_AVX2SIMDUnit + } + let v_ZERO (_: Prims.unit) = Core.Convert.f_into #u8 #t_AVX2SIMDUnit diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Avx2.Vector_type.fsti b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Avx2.Vector_type.fsti index 052da1273..e14bacddd 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Avx2.Vector_type.fsti +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Avx2.Vector_type.fsti @@ -6,12 +6,7 @@ open FStar.Mul type t_AVX2SIMDUnit = { f_coefficients:u8 } [@@ FStar.Tactics.Typeclasses.tcinstance] -let impl: Core.Convert.t_From t_AVX2SIMDUnit u8 = - { - f_from_pre = (fun (coefficients: u8) -> true); - f_from_post = (fun (coefficients: u8) (out: t_AVX2SIMDUnit) -> true); - f_from = fun (coefficients: u8) -> { f_coefficients = coefficients } <: t_AVX2SIMDUnit - } +val impl:Core.Convert.t_From t_AVX2SIMDUnit u8 val v_ZERO: Prims.unit -> Prims.Pure t_AVX2SIMDUnit Prims.l_True (fun _ -> Prims.l_True) diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Avx2.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Avx2.fst new file mode 100644 index 000000000..3c5867826 --- /dev/null +++ 
b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Avx2.fst @@ -0,0 +1,568 @@ +module Libcrux_ml_dsa.Simd.Avx2 +#set-options "--fuel 0 --ifuel 1 --z3rlimit 100" +open Core +open FStar.Mul + +let _ = + (* This module has implicit dependencies, here we make them explicit. *) + (* The implicit dependencies arise from typeclasses instances. *) + let open Libcrux_ml_dsa.Simd.Avx2.Vector_type in + () + +[@@ FStar.Tactics.Typeclasses.tcinstance] +let impl: Libcrux_ml_dsa.Simd.Traits.t_Operations +Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit = + { + _super_11581440318597584651 = FStar.Tactics.Typeclasses.solve; + _super_9442900250278684536 = FStar.Tactics.Typeclasses.solve; + f_ZERO_pre = (fun (_: Prims.unit) -> true); + f_ZERO_post + = + (fun (_: Prims.unit) (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true); + f_ZERO = (fun (_: Prims.unit) -> Libcrux_ml_dsa.Simd.Avx2.Vector_type.v_ZERO ()); + f_from_coefficient_array_pre = (fun (coefficient_array: t_Slice i32) -> true); + f_from_coefficient_array_post + = + (fun + (coefficient_array: t_Slice i32) + (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + true); + f_from_coefficient_array + = + (fun (coefficient_array: t_Slice i32) -> + Libcrux_ml_dsa.Simd.Avx2.Vector_type.from_coefficient_array coefficient_array); + f_to_coefficient_array_pre + = + (fun (self: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true); + f_to_coefficient_array_post + = + (fun (self: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) (out: t_Array i32 (sz 8)) -> + true); + f_to_coefficient_array + = + (fun (self: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> + Libcrux_ml_dsa.Simd.Avx2.Vector_type.to_coefficient_array self); + f_add_pre + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + true); + f_add_post + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (rhs: 
Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + true); + f_add + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + Core.Convert.f_into #u8 + #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + #FStar.Tactics.Typeclasses.solve + (Libcrux_ml_dsa.Simd.Avx2.Arithmetic.add lhs + .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + rhs.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + <: + u8)); + f_subtract_pre + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + true); + f_subtract_post + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + true); + f_subtract + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + Core.Convert.f_into #u8 + #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + #FStar.Tactics.Typeclasses.solve + (Libcrux_ml_dsa.Simd.Avx2.Arithmetic.subtract lhs + .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + rhs.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + <: + u8)); + f_montgomery_multiply_pre + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + true); + f_montgomery_multiply_post + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + true); + f_montgomery_multiply + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + Core.Convert.f_into #u8 + 
#Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + #FStar.Tactics.Typeclasses.solve + (Libcrux_ml_dsa.Simd.Avx2.Arithmetic.montgomery_multiply lhs + .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + rhs.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + <: + u8)); + f_shift_left_then_reduce_pre + = + (fun (v_SHIFT_BY: i32) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true); + f_shift_left_then_reduce_post + = + (fun + (v_SHIFT_BY: i32) + (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + true); + f_shift_left_then_reduce + = + (fun (v_SHIFT_BY: i32) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> + Core.Convert.f_into #u8 + #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + #FStar.Tactics.Typeclasses.solve + (Libcrux_ml_dsa.Simd.Avx2.Arithmetic.shift_left_then_reduce v_SHIFT_BY + simd_unit.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + <: + u8)); + f_power2round_pre + = + (fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true); + f_power2round_post + = + (fun + (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (out: + (Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit & + Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit)) + -> + true); + f_power2round + = + (fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> + let lower, upper:(u8 & u8) = + Libcrux_ml_dsa.Simd.Avx2.Arithmetic.power2round simd_unit + .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + in + Core.Convert.f_into #u8 + #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + #FStar.Tactics.Typeclasses.solve + lower, + Core.Convert.f_into #u8 + #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + #FStar.Tactics.Typeclasses.solve + upper + <: + (Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit & + Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit)); + f_infinity_norm_exceeds_pre + = + 
(fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) (bound: i32) -> true); + f_infinity_norm_exceeds_post + = + (fun + (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (bound: i32) + (out: bool) + -> + true); + f_infinity_norm_exceeds + = + (fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) (bound: i32) -> + Libcrux_ml_dsa.Simd.Avx2.Arithmetic.infinity_norm_exceeds simd_unit + .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + bound); + f_decompose_pre + = + (fun (v_GAMMA2: i32) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true); + f_decompose_post + = + (fun + (v_GAMMA2: i32) + (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (out: + (Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit & + Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit)) + -> + true); + f_decompose + = + (fun (v_GAMMA2: i32) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> + let lower, upper:(u8 & u8) = + Libcrux_ml_dsa.Simd.Avx2.Arithmetic.decompose v_GAMMA2 + simd_unit.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + in + Core.Convert.f_into #u8 + #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + #FStar.Tactics.Typeclasses.solve + lower, + Core.Convert.f_into #u8 + #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + #FStar.Tactics.Typeclasses.solve + upper + <: + (Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit & + Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit)); + f_compute_hint_pre + = + (fun + (v_GAMMA2: i32) + (low: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (high: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + true); + f_compute_hint_post + = + (fun + (v_GAMMA2: i32) + (low: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (high: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (out: (usize & Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit)) + -> + true); + f_compute_hint + = + (fun + (v_GAMMA2: i32) + 
(low: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (high: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + let count, hint:(usize & u8) = + Libcrux_ml_dsa.Simd.Avx2.Arithmetic.compute_hint v_GAMMA2 + low.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + high.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + in + count, + Core.Convert.f_into #u8 + #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + #FStar.Tactics.Typeclasses.solve + hint + <: + (usize & Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit)); + f_use_hint_pre + = + (fun + (v_GAMMA2: i32) + (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (hint: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + true); + f_use_hint_post + = + (fun + (v_GAMMA2: i32) + (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (hint: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + true); + f_use_hint + = + (fun + (v_GAMMA2: i32) + (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (hint: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + Core.Convert.f_into #u8 + #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + #FStar.Tactics.Typeclasses.solve + (Libcrux_ml_dsa.Simd.Avx2.Arithmetic.use_hint v_GAMMA2 + simd_unit.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + hint.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + <: + u8)); + f_rejection_sample_less_than_field_modulus_pre + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) -> true); + f_rejection_sample_less_than_field_modulus_post + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) (out2: (t_Slice i32 & usize)) -> true); + f_rejection_sample_less_than_field_modulus + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) -> + let tmp0, out1:(t_Slice i32 & usize) = + Libcrux_ml_dsa.Simd.Avx2.Rejection_sample.Less_than_field_modulus.sample randomness out + in + let out:t_Slice i32 = tmp0 in 
+ let hax_temp_output:usize = out1 in + out, hax_temp_output <: (t_Slice i32 & usize)); + f_rejection_sample_less_than_eta_equals_2_pre + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) -> true); + f_rejection_sample_less_than_eta_equals_2_post + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) (out2: (t_Slice i32 & usize)) -> true); + f_rejection_sample_less_than_eta_equals_2_ + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) -> + let tmp0, out1:(t_Slice i32 & usize) = + Libcrux_ml_dsa.Simd.Avx2.Rejection_sample.Less_than_eta.sample (sz 2) randomness out + in + let out:t_Slice i32 = tmp0 in + let hax_temp_output:usize = out1 in + out, hax_temp_output <: (t_Slice i32 & usize)); + f_rejection_sample_less_than_eta_equals_4_pre + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) -> true); + f_rejection_sample_less_than_eta_equals_4_post + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) (out2: (t_Slice i32 & usize)) -> true); + f_rejection_sample_less_than_eta_equals_4_ + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) -> + let tmp0, out1:(t_Slice i32 & usize) = + Libcrux_ml_dsa.Simd.Avx2.Rejection_sample.Less_than_eta.sample (sz 4) randomness out + in + let out:t_Slice i32 = tmp0 in + let hax_temp_output:usize = out1 in + out, hax_temp_output <: (t_Slice i32 & usize)); + f_gamma1_serialize_pre + = + (fun (v_OUTPUT_SIZE: usize) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> + true); + f_gamma1_serialize_post + = + (fun + (v_OUTPUT_SIZE: usize) + (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (out: t_Array u8 v_OUTPUT_SIZE) + -> + true); + f_gamma1_serialize + = + (fun (v_OUTPUT_SIZE: usize) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> + Libcrux_ml_dsa.Simd.Avx2.Encoding.Gamma1.serialize v_OUTPUT_SIZE + simd_unit.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients); + f_gamma1_deserialize_pre = (fun (v_GAMMA1_EXPONENT: usize) (serialized: t_Slice u8) -> true); + 
f_gamma1_deserialize_post + = + (fun + (v_GAMMA1_EXPONENT: usize) + (serialized: t_Slice u8) + (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + true); + f_gamma1_deserialize + = + (fun (v_GAMMA1_EXPONENT: usize) (serialized: t_Slice u8) -> + Core.Convert.f_into #u8 + #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + #FStar.Tactics.Typeclasses.solve + (Libcrux_ml_dsa.Simd.Avx2.Encoding.Gamma1.deserialize v_GAMMA1_EXPONENT serialized <: u8)); + f_commitment_serialize_pre + = + (fun (v_OUTPUT_SIZE: usize) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> + true); + f_commitment_serialize_post + = + (fun + (v_OUTPUT_SIZE: usize) + (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (out: t_Array u8 v_OUTPUT_SIZE) + -> + true); + f_commitment_serialize + = + (fun (v_OUTPUT_SIZE: usize) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> + Libcrux_ml_dsa.Simd.Avx2.Encoding.Commitment.serialize v_OUTPUT_SIZE + simd_unit.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients); + f_error_serialize_pre + = + (fun (v_OUTPUT_SIZE: usize) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> + true); + f_error_serialize_post + = + (fun + (v_OUTPUT_SIZE: usize) + (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (out: t_Array u8 v_OUTPUT_SIZE) + -> + true); + f_error_serialize + = + (fun (v_OUTPUT_SIZE: usize) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> + Libcrux_ml_dsa.Simd.Avx2.Encoding.Error.serialize v_OUTPUT_SIZE + simd_unit.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients); + f_error_deserialize_pre = (fun (v_ETA: usize) (serialized: t_Slice u8) -> true); + f_error_deserialize_post + = + (fun + (v_ETA: usize) + (serialized: t_Slice u8) + (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + -> + true); + f_error_deserialize + = + (fun (v_ETA: usize) (serialized: t_Slice u8) -> + Core.Convert.f_into #u8 + 
#Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + #FStar.Tactics.Typeclasses.solve + (Libcrux_ml_dsa.Simd.Avx2.Encoding.Error.deserialize v_ETA serialized <: u8)); + f_t0_serialize_pre + = + (fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true); + f_t0_serialize_post + = + (fun + (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (out: t_Array u8 (sz 13)) + -> + true); + f_t0_serialize + = + (fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> + Libcrux_ml_dsa.Simd.Avx2.Encoding.T0.serialize simd_unit + .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients); + f_t0_deserialize_pre = (fun (serialized: t_Slice u8) -> true); + f_t0_deserialize_post + = + (fun (serialized: t_Slice u8) (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true + ); + f_t0_deserialize + = + (fun (serialized: t_Slice u8) -> + Core.Convert.f_into #u8 + #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + #FStar.Tactics.Typeclasses.solve + (Libcrux_ml_dsa.Simd.Avx2.Encoding.T0.deserialize serialized <: u8)); + f_t1_serialize_pre + = + (fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true); + f_t1_serialize_post + = + (fun + (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + (out: t_Array u8 (sz 10)) + -> + true); + f_t1_serialize + = + (fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> + Libcrux_ml_dsa.Simd.Avx2.Encoding.T1.serialize simd_unit + .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients); + f_t1_deserialize_pre = (fun (serialized: t_Slice u8) -> true); + f_t1_deserialize_post + = + (fun (serialized: t_Slice u8) (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true + ); + f_t1_deserialize + = + (fun (serialized: t_Slice u8) -> + Core.Convert.f_into #u8 + #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + #FStar.Tactics.Typeclasses.solve + (Libcrux_ml_dsa.Simd.Avx2.Encoding.T1.deserialize serialized <: u8)); + f_ntt_pre + = + (fun 
(simd_units: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) -> true); + f_ntt_post + = + (fun + (simd_units: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) + (out: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) + -> + true); + f_ntt + = + (fun (simd_units: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) -> + let re:t_Array u8 (sz 32) = + Rust_primitives.Hax.repeat (Libcrux_intrinsics.Avx2_extract.mm256_setzero_si256 () <: u8) + (sz 32) + in + let re:t_Array u8 (sz 32) = + Rust_primitives.Hax.Folds.fold_range (sz 0) + Libcrux_ml_dsa.Simd.Traits.v_SIMD_UNITS_IN_RING_ELEMENT + (fun re temp_1_ -> + let re:t_Array u8 (sz 32) = re in + let _:usize = temp_1_ in + true) + re + (fun re i -> + let re:t_Array u8 (sz 32) = re in + let i:usize = i in + Rust_primitives.Hax.Monomorphized_update_at.update_at_usize re + i + (simd_units.[ i ] <: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + <: + t_Array u8 (sz 32)) + in + let result:t_Array u8 (sz 32) = Libcrux_ml_dsa.Simd.Avx2.Ntt.ntt re in + Core.Array.from_fn #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + (sz 32) + (fun i -> + let i:usize = i in + { Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients = result.[ i ] <: u8 } + <: + Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit)); + f_invert_ntt_montgomery_pre + = + (fun (simd_units: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) -> true); + f_invert_ntt_montgomery_post + = + (fun + (simd_units: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) + (out: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) + -> + true); + f_invert_ntt_montgomery + = + fun (simd_units: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) -> + let re:t_Array u8 (sz 32) = + Rust_primitives.Hax.repeat (Libcrux_intrinsics.Avx2_extract.mm256_setzero_si256 () <: u8) + (sz 
32) + in + let re:t_Array u8 (sz 32) = + Rust_primitives.Hax.Folds.fold_range (sz 0) + Libcrux_ml_dsa.Simd.Traits.v_SIMD_UNITS_IN_RING_ELEMENT + (fun re temp_1_ -> + let re:t_Array u8 (sz 32) = re in + let _:usize = temp_1_ in + true) + re + (fun re i -> + let re:t_Array u8 (sz 32) = re in + let i:usize = i in + Rust_primitives.Hax.Monomorphized_update_at.update_at_usize re + i + (simd_units.[ i ] <: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients + <: + t_Array u8 (sz 32)) + in + let result:t_Array u8 (sz 32) = Libcrux_ml_dsa.Simd.Avx2.Invntt.invert_ntt_montgomery re in + Core.Array.from_fn #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit + (sz 32) + (fun i -> + let i:usize = i in + { Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients = result.[ i ] <: u8 } + <: + Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) + } diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Avx2.fsti b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Avx2.fsti index 8ff985c8c..708395ec3 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Avx2.fsti +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Avx2.fsti @@ -10,545 +10,4 @@ let _ = () [@@ FStar.Tactics.Typeclasses.tcinstance] -let impl: Libcrux_ml_dsa.Simd.Traits.t_Operations -Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit = - { - _super_11581440318597584651 = FStar.Tactics.Typeclasses.solve; - _super_9442900250278684536 = FStar.Tactics.Typeclasses.solve; - f_ZERO_pre = (fun (_: Prims.unit) -> true); - f_ZERO_post - = - (fun (_: Prims.unit) (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true); - f_ZERO = (fun (_: Prims.unit) -> Libcrux_ml_dsa.Simd.Avx2.Vector_type.v_ZERO ()); - f_from_coefficient_array_pre = (fun (coefficient_array: t_Slice i32) -> true); - f_from_coefficient_array_post - = - (fun - (coefficient_array: t_Slice i32) - (out: 
Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - true); - f_from_coefficient_array - = - (fun (coefficient_array: t_Slice i32) -> - Libcrux_ml_dsa.Simd.Avx2.Vector_type.from_coefficient_array coefficient_array); - f_to_coefficient_array_pre - = - (fun (self: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true); - f_to_coefficient_array_post - = - (fun (self: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) (out: t_Array i32 (sz 8)) -> - true); - f_to_coefficient_array - = - (fun (self: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> - Libcrux_ml_dsa.Simd.Avx2.Vector_type.to_coefficient_array self); - f_add_pre - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - true); - f_add_post - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - true); - f_add - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - (Libcrux_ml_dsa.Simd.Avx2.Arithmetic.add lhs - .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients - rhs.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients - <: - u8)); - f_subtract_pre - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - true); - f_subtract_post - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - true); - f_subtract - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - 
Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - (Libcrux_ml_dsa.Simd.Avx2.Arithmetic.subtract lhs - .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients - rhs.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients - <: - u8)); - f_montgomery_multiply_pre - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - true); - f_montgomery_multiply_post - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - true); - f_montgomery_multiply - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - (Libcrux_ml_dsa.Simd.Avx2.Arithmetic.montgomery_multiply lhs - .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients - rhs.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients - <: - u8)); - f_shift_left_then_reduce_pre - = - (fun (v_SHIFT_BY: i32) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true); - f_shift_left_then_reduce_post - = - (fun - (v_SHIFT_BY: i32) - (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - true); - f_shift_left_then_reduce - = - (fun (v_SHIFT_BY: i32) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> - Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - (Libcrux_ml_dsa.Simd.Avx2.Arithmetic.shift_left_then_reduce v_SHIFT_BY - simd_unit.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients - <: - u8)); - f_power2round_pre - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> 
true); - f_power2round_post - = - (fun - (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (out: - (Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit & - Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit)) - -> - true); - f_power2round - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> - let lower, upper:(u8 & u8) = - Libcrux_ml_dsa.Simd.Avx2.Arithmetic.power2round simd_unit - .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients - in - Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - lower, - Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - upper - <: - (Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit & - Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit)); - f_infinity_norm_exceeds_pre - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) (bound: i32) -> true); - f_infinity_norm_exceeds_post - = - (fun - (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (bound: i32) - (out: bool) - -> - true); - f_infinity_norm_exceeds - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) (bound: i32) -> - Libcrux_ml_dsa.Simd.Avx2.Arithmetic.infinity_norm_exceeds simd_unit - .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients - bound); - f_decompose_pre - = - (fun (v_GAMMA2: i32) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true); - f_decompose_post - = - (fun - (v_GAMMA2: i32) - (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (out: - (Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit & - Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit)) - -> - true); - f_decompose - = - (fun (v_GAMMA2: i32) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> - let lower, upper:(u8 & u8) = - Libcrux_ml_dsa.Simd.Avx2.Arithmetic.decompose v_GAMMA2 - 
simd_unit.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients - in - Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - lower, - Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - upper - <: - (Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit & - Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit)); - f_compute_hint_pre - = - (fun - (v_GAMMA2: i32) - (low: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (high: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - true); - f_compute_hint_post - = - (fun - (v_GAMMA2: i32) - (low: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (high: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (out: (usize & Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit)) - -> - true); - f_compute_hint - = - (fun - (v_GAMMA2: i32) - (low: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (high: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - let count, hint:(usize & u8) = - Libcrux_ml_dsa.Simd.Avx2.Arithmetic.compute_hint v_GAMMA2 - low.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients - high.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients - in - count, - Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - hint - <: - (usize & Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit)); - f_use_hint_pre - = - (fun - (v_GAMMA2: i32) - (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (hint: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - true); - f_use_hint_post - = - (fun - (v_GAMMA2: i32) - (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (hint: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - true); - f_use_hint - = - (fun - (v_GAMMA2: i32) - (simd_unit: 
Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (hint: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - (Libcrux_ml_dsa.Simd.Avx2.Arithmetic.use_hint v_GAMMA2 - simd_unit.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients - hint.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients - <: - u8)); - f_rejection_sample_less_than_field_modulus_pre - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) -> true); - f_rejection_sample_less_than_field_modulus_post - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) (out2: (t_Slice i32 & usize)) -> true); - f_rejection_sample_less_than_field_modulus - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) -> - let tmp0, out1:(t_Slice i32 & usize) = - Libcrux_ml_dsa.Simd.Avx2.Rejection_sample.Less_than_field_modulus.sample randomness out - in - let out:t_Slice i32 = tmp0 in - let hax_temp_output:usize = out1 in - out, hax_temp_output <: (t_Slice i32 & usize)); - f_rejection_sample_less_than_eta_equals_2_pre - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) -> true); - f_rejection_sample_less_than_eta_equals_2_post - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) (out2: (t_Slice i32 & usize)) -> true); - f_rejection_sample_less_than_eta_equals_2_ - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) -> - let tmp0, out1:(t_Slice i32 & usize) = - Libcrux_ml_dsa.Simd.Avx2.Rejection_sample.Less_than_eta.sample (sz 2) randomness out - in - let out:t_Slice i32 = tmp0 in - let hax_temp_output:usize = out1 in - out, hax_temp_output <: (t_Slice i32 & usize)); - f_rejection_sample_less_than_eta_equals_4_pre - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) -> true); - f_rejection_sample_less_than_eta_equals_4_post - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) (out2: (t_Slice i32 & usize)) -> true); - f_rejection_sample_less_than_eta_equals_4_ - = - (fun (randomness: t_Slice u8) 
(out: t_Slice i32) -> - let tmp0, out1:(t_Slice i32 & usize) = - Libcrux_ml_dsa.Simd.Avx2.Rejection_sample.Less_than_eta.sample (sz 4) randomness out - in - let out:t_Slice i32 = tmp0 in - let hax_temp_output:usize = out1 in - out, hax_temp_output <: (t_Slice i32 & usize)); - f_gamma1_serialize_pre - = - (fun (v_OUTPUT_SIZE: usize) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> - true); - f_gamma1_serialize_post - = - (fun - (v_OUTPUT_SIZE: usize) - (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (out: t_Array u8 v_OUTPUT_SIZE) - -> - true); - f_gamma1_serialize - = - (fun (v_OUTPUT_SIZE: usize) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> - Libcrux_ml_dsa.Simd.Avx2.Encoding.Gamma1.serialize v_OUTPUT_SIZE - simd_unit.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients); - f_gamma1_deserialize_pre = (fun (v_GAMMA1_EXPONENT: usize) (serialized: t_Slice u8) -> true); - f_gamma1_deserialize_post - = - (fun - (v_GAMMA1_EXPONENT: usize) - (serialized: t_Slice u8) - (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - true); - f_gamma1_deserialize - = - (fun (v_GAMMA1_EXPONENT: usize) (serialized: t_Slice u8) -> - Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - (Libcrux_ml_dsa.Simd.Avx2.Encoding.Gamma1.deserialize v_GAMMA1_EXPONENT serialized <: u8)); - f_commitment_serialize_pre - = - (fun (v_OUTPUT_SIZE: usize) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> - true); - f_commitment_serialize_post - = - (fun - (v_OUTPUT_SIZE: usize) - (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (out: t_Array u8 v_OUTPUT_SIZE) - -> - true); - f_commitment_serialize - = - (fun (v_OUTPUT_SIZE: usize) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> - Libcrux_ml_dsa.Simd.Avx2.Encoding.Commitment.serialize v_OUTPUT_SIZE - simd_unit.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients); - 
f_error_serialize_pre - = - (fun (v_OUTPUT_SIZE: usize) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> - true); - f_error_serialize_post - = - (fun - (v_OUTPUT_SIZE: usize) - (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (out: t_Array u8 v_OUTPUT_SIZE) - -> - true); - f_error_serialize - = - (fun (v_OUTPUT_SIZE: usize) (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> - Libcrux_ml_dsa.Simd.Avx2.Encoding.Error.serialize v_OUTPUT_SIZE - simd_unit.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients); - f_error_deserialize_pre = (fun (v_ETA: usize) (serialized: t_Slice u8) -> true); - f_error_deserialize_post - = - (fun - (v_ETA: usize) - (serialized: t_Slice u8) - (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - -> - true); - f_error_deserialize - = - (fun (v_ETA: usize) (serialized: t_Slice u8) -> - Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - (Libcrux_ml_dsa.Simd.Avx2.Encoding.Error.deserialize v_ETA serialized <: u8)); - f_t0_serialize_pre - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true); - f_t0_serialize_post - = - (fun - (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (out: t_Array u8 (sz 13)) - -> - true); - f_t0_serialize - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> - Libcrux_ml_dsa.Simd.Avx2.Encoding.T0.serialize simd_unit - .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients); - f_t0_deserialize_pre = (fun (serialized: t_Slice u8) -> true); - f_t0_deserialize_post - = - (fun (serialized: t_Slice u8) (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true - ); - f_t0_deserialize - = - (fun (serialized: t_Slice u8) -> - Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - (Libcrux_ml_dsa.Simd.Avx2.Encoding.T0.deserialize serialized <: u8)); - 
f_t1_serialize_pre - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true); - f_t1_serialize_post - = - (fun - (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - (out: t_Array u8 (sz 10)) - -> - true); - f_t1_serialize - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> - Libcrux_ml_dsa.Simd.Avx2.Encoding.T1.serialize simd_unit - .Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients); - f_t1_deserialize_pre = (fun (serialized: t_Slice u8) -> true); - f_t1_deserialize_post - = - (fun (serialized: t_Slice u8) (out: Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) -> true - ); - f_t1_deserialize - = - (fun (serialized: t_Slice u8) -> - Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - (Libcrux_ml_dsa.Simd.Avx2.Encoding.T1.deserialize serialized <: u8)); - f_ntt_pre - = - (fun (simd_units: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) -> true); - f_ntt_post - = - (fun - (simd_units: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) - (out: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) - -> - true); - f_ntt - = - (fun (simd_units: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) -> - let result:t_Array u8 (sz 32) = - Libcrux_ml_dsa.Simd.Avx2.Ntt.ntt (Core.Array.impl_23__map #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - (sz 32) - #u8 - simd_units - (fun x -> - let x:Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit = x in - x.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients) - <: - t_Array u8 (sz 32)) - in - Core.Array.impl_23__map #u8 - (sz 32) - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - result - (fun x -> - let x:u8 = x in - Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - x - <: - Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit)); - 
f_invert_ntt_montgomery_pre - = - (fun (simd_units: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) -> true); - f_invert_ntt_montgomery_post - = - (fun - (simd_units: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) - (out: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) - -> - true); - f_invert_ntt_montgomery - = - fun (simd_units: t_Array Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit (sz 32)) -> - let result:t_Array u8 (sz 32) = - Libcrux_ml_dsa.Simd.Avx2.Invntt.invert_ntt_montgomery (Core.Array.impl_23__map #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - (sz 32) - #u8 - simd_units - (fun x -> - let x:Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit = x in - x.Libcrux_ml_dsa.Simd.Avx2.Vector_type.f_coefficients) - <: - t_Array u8 (sz 32)) - in - Core.Array.impl_23__map #u8 - (sz 32) - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - result - (fun x -> - let x:u8 = x in - Core.Convert.f_into #u8 - #Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit - #FStar.Tactics.Typeclasses.solve - x - <: - Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit) - } +val impl:Libcrux_ml_dsa.Simd.Traits.t_Operations Libcrux_ml_dsa.Simd.Avx2.Vector_type.t_AVX2SIMDUnit diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Portable.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Portable.fst new file mode 100644 index 000000000..b5c72724c --- /dev/null +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Portable.fst @@ -0,0 +1,462 @@ +module Libcrux_ml_dsa.Simd.Portable +#set-options "--fuel 0 --ifuel 1 --z3rlimit 100" +open Core +open FStar.Mul + +let _ = + (* This module has implicit dependencies, here we make them explicit. *) + (* The implicit dependencies arise from typeclasses instances. 
*) + let open Libcrux_ml_dsa.Simd.Portable.Vector_type in + () + +[@@ FStar.Tactics.Typeclasses.tcinstance] +let impl: Libcrux_ml_dsa.Simd.Traits.t_Operations +Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit = + { + _super_11581440318597584651 = FStar.Tactics.Typeclasses.solve; + _super_9442900250278684536 = FStar.Tactics.Typeclasses.solve; + f_ZERO_pre = (fun (_: Prims.unit) -> true); + f_ZERO_post + = + (fun (_: Prims.unit) (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> true); + f_ZERO = (fun (_: Prims.unit) -> Libcrux_ml_dsa.Simd.Portable.Vector_type.v_ZERO ()); + f_from_coefficient_array_pre = (fun (array: t_Slice i32) -> true); + f_from_coefficient_array_post + = + (fun (array: t_Slice i32) (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> + true); + f_from_coefficient_array + = + (fun (array: t_Slice i32) -> + Libcrux_ml_dsa.Simd.Portable.Vector_type.from_coefficient_array array); + f_to_coefficient_array_pre + = + (fun (self: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> true); + f_to_coefficient_array_post + = + (fun + (self: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (out: t_Array i32 (sz 8)) + -> + true); + f_to_coefficient_array + = + (fun (self: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> + Libcrux_ml_dsa.Simd.Portable.Vector_type.to_coefficient_array self); + f_add_pre + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_add_post + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_add + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + 
Libcrux_ml_dsa.Simd.Portable.Arithmetic.add lhs rhs); + f_subtract_pre + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_subtract_post + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_subtract + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + Libcrux_ml_dsa.Simd.Portable.Arithmetic.subtract lhs rhs); + f_montgomery_multiply_pre + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_montgomery_multiply_post + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_montgomery_multiply + = + (fun + (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + Libcrux_ml_dsa.Simd.Portable.Arithmetic.montgomery_multiply lhs rhs); + f_shift_left_then_reduce_pre + = + (fun + (v_SHIFT_BY: i32) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_shift_left_then_reduce_post + = + (fun + (v_SHIFT_BY: i32) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_shift_left_then_reduce + = + (fun + (v_SHIFT_BY: i32) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + Libcrux_ml_dsa.Simd.Portable.Arithmetic.shift_left_then_reduce v_SHIFT_BY 
simd_unit); + f_power2round_pre + = + (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> true); + f_power2round_post + = + (fun + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (out: + (Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit & + Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit)) + -> + true); + f_power2round + = + (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> + Libcrux_ml_dsa.Simd.Portable.Arithmetic.power2round simd_unit); + f_infinity_norm_exceeds_pre + = + (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) (bound: i32) -> + true); + f_infinity_norm_exceeds_post + = + (fun + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (bound: i32) + (out: bool) + -> + true); + f_infinity_norm_exceeds + = + (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) (bound: i32) -> + Libcrux_ml_dsa.Simd.Portable.Arithmetic.infinity_norm_exceeds simd_unit bound); + f_decompose_pre + = + (fun (v_GAMMA2: i32) (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> + true); + f_decompose_post + = + (fun + (v_GAMMA2: i32) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (out: + (Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit & + Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit)) + -> + true); + f_decompose + = + (fun (v_GAMMA2: i32) (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> + Libcrux_ml_dsa.Simd.Portable.Arithmetic.decompose v_GAMMA2 simd_unit); + f_compute_hint_pre + = + (fun + (v_GAMMA2: i32) + (low: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (high: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_compute_hint_post + = + (fun + (v_GAMMA2: i32) + (low: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (high: 
Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (out: (usize & Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit)) + -> + true); + f_compute_hint + = + (fun + (v_GAMMA2: i32) + (low: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (high: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + Libcrux_ml_dsa.Simd.Portable.Arithmetic.compute_hint v_GAMMA2 low high); + f_use_hint_pre + = + (fun + (v_GAMMA2: i32) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (hint: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_use_hint_post + = + (fun + (v_GAMMA2: i32) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (hint: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_use_hint + = + (fun + (v_GAMMA2: i32) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (hint: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + Libcrux_ml_dsa.Simd.Portable.Arithmetic.use_hint v_GAMMA2 simd_unit hint); + f_rejection_sample_less_than_field_modulus_pre + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) -> true); + f_rejection_sample_less_than_field_modulus_post + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) (out2: (t_Slice i32 & usize)) -> true); + f_rejection_sample_less_than_field_modulus + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) -> + let tmp0, out1:(t_Slice i32 & usize) = + Libcrux_ml_dsa.Simd.Portable.Sample.rejection_sample_less_than_field_modulus randomness + out + in + let out:t_Slice i32 = tmp0 in + let hax_temp_output:usize = out1 in + out, hax_temp_output <: (t_Slice i32 & usize)); + f_rejection_sample_less_than_eta_equals_2_pre + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) -> true); + f_rejection_sample_less_than_eta_equals_2_post + = + (fun (randomness: t_Slice u8) (out: 
t_Slice i32) (out2: (t_Slice i32 & usize)) -> true); + f_rejection_sample_less_than_eta_equals_2_ + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) -> + let tmp0, out1:(t_Slice i32 & usize) = + Libcrux_ml_dsa.Simd.Portable.Sample.rejection_sample_less_than_eta_equals_2_ randomness + out + in + let out:t_Slice i32 = tmp0 in + let hax_temp_output:usize = out1 in + out, hax_temp_output <: (t_Slice i32 & usize)); + f_rejection_sample_less_than_eta_equals_4_pre + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) -> true); + f_rejection_sample_less_than_eta_equals_4_post + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) (out2: (t_Slice i32 & usize)) -> true); + f_rejection_sample_less_than_eta_equals_4_ + = + (fun (randomness: t_Slice u8) (out: t_Slice i32) -> + let tmp0, out1:(t_Slice i32 & usize) = + Libcrux_ml_dsa.Simd.Portable.Sample.rejection_sample_less_than_eta_equals_4_ randomness + out + in + let out:t_Slice i32 = tmp0 in + let hax_temp_output:usize = out1 in + out, hax_temp_output <: (t_Slice i32 & usize)); + f_gamma1_serialize_pre + = + (fun + (v_OUTPUT_SIZE: usize) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_gamma1_serialize_post + = + (fun + (v_OUTPUT_SIZE: usize) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (out: t_Array u8 v_OUTPUT_SIZE) + -> + true); + f_gamma1_serialize + = + (fun + (v_OUTPUT_SIZE: usize) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + Libcrux_ml_dsa.Simd.Portable.Encoding.Gamma1.serialize v_OUTPUT_SIZE simd_unit); + f_gamma1_deserialize_pre = (fun (v_GAMMA1_EXPONENT: usize) (serialized: t_Slice u8) -> true); + f_gamma1_deserialize_post + = + (fun + (v_GAMMA1_EXPONENT: usize) + (serialized: t_Slice u8) + (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_gamma1_deserialize + = + (fun (v_GAMMA1_EXPONENT: usize) (serialized: t_Slice u8) -> + 
Libcrux_ml_dsa.Simd.Portable.Encoding.Gamma1.deserialize v_GAMMA1_EXPONENT serialized); + f_commitment_serialize_pre + = + (fun + (v_OUTPUT_SIZE: usize) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_commitment_serialize_post + = + (fun + (v_OUTPUT_SIZE: usize) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (out: t_Array u8 v_OUTPUT_SIZE) + -> + true); + f_commitment_serialize + = + (fun + (v_OUTPUT_SIZE: usize) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + Libcrux_ml_dsa.Simd.Portable.Encoding.Commitment.serialize v_OUTPUT_SIZE simd_unit); + f_error_serialize_pre + = + (fun + (v_OUTPUT_SIZE: usize) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_error_serialize_post + = + (fun + (v_OUTPUT_SIZE: usize) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (out: t_Array u8 v_OUTPUT_SIZE) + -> + true); + f_error_serialize + = + (fun + (v_OUTPUT_SIZE: usize) + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + Libcrux_ml_dsa.Simd.Portable.Encoding.Error.serialize v_OUTPUT_SIZE simd_unit); + f_error_deserialize_pre = (fun (v_ETA: usize) (serialized: t_Slice u8) -> true); + f_error_deserialize_post + = + (fun + (v_ETA: usize) + (serialized: t_Slice u8) + (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_error_deserialize + = + (fun (v_ETA: usize) (serialized: t_Slice u8) -> + Libcrux_ml_dsa.Simd.Portable.Encoding.Error.deserialize v_ETA serialized); + f_t0_serialize_pre + = + (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> true); + f_t0_serialize_post + = + (fun + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (out: t_Array u8 (sz 13)) + -> + true); + f_t0_serialize + = + (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> + 
Libcrux_ml_dsa.Simd.Portable.Encoding.T0.serialize simd_unit); + f_t0_deserialize_pre = (fun (serialized: t_Slice u8) -> true); + f_t0_deserialize_post + = + (fun + (serialized: t_Slice u8) + (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_t0_deserialize + = + (fun (serialized: t_Slice u8) -> Libcrux_ml_dsa.Simd.Portable.Encoding.T0.deserialize serialized + ); + f_t1_serialize_pre + = + (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> true); + f_t1_serialize_post + = + (fun + (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + (out: t_Array u8 (sz 10)) + -> + true); + f_t1_serialize + = + (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> + Libcrux_ml_dsa.Simd.Portable.Encoding.T1.serialize simd_unit); + f_t1_deserialize_pre = (fun (serialized: t_Slice u8) -> true); + f_t1_deserialize_post + = + (fun + (serialized: t_Slice u8) + (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) + -> + true); + f_t1_deserialize + = + (fun (serialized: t_Slice u8) -> Libcrux_ml_dsa.Simd.Portable.Encoding.T1.deserialize serialized + ); + f_ntt_pre + = + (fun + (simd_units: t_Array Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) + -> + true); + f_ntt_post + = + (fun + (simd_units: t_Array Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) + (out: t_Array Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) + -> + true); + f_ntt + = + (fun + (simd_units: t_Array Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) + -> + Libcrux_ml_dsa.Simd.Portable.Ntt.ntt simd_units); + f_invert_ntt_montgomery_pre + = + (fun + (simd_units: t_Array Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) + -> + true); + f_invert_ntt_montgomery_post + = + (fun + (simd_units: t_Array Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) + (out: t_Array 
Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) + -> + true); + f_invert_ntt_montgomery + = + fun (simd_units: t_Array Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) -> + Libcrux_ml_dsa.Simd.Portable.Invntt.invert_ntt_montgomery simd_units + } diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Portable.fsti b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Portable.fsti index b5c72724c..c3bcf3d6d 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Portable.fsti +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Simd.Portable.fsti @@ -10,453 +10,5 @@ let _ = () [@@ FStar.Tactics.Typeclasses.tcinstance] -let impl: Libcrux_ml_dsa.Simd.Traits.t_Operations -Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit = - { - _super_11581440318597584651 = FStar.Tactics.Typeclasses.solve; - _super_9442900250278684536 = FStar.Tactics.Typeclasses.solve; - f_ZERO_pre = (fun (_: Prims.unit) -> true); - f_ZERO_post - = - (fun (_: Prims.unit) (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> true); - f_ZERO = (fun (_: Prims.unit) -> Libcrux_ml_dsa.Simd.Portable.Vector_type.v_ZERO ()); - f_from_coefficient_array_pre = (fun (array: t_Slice i32) -> true); - f_from_coefficient_array_post - = - (fun (array: t_Slice i32) (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> - true); - f_from_coefficient_array - = - (fun (array: t_Slice i32) -> - Libcrux_ml_dsa.Simd.Portable.Vector_type.from_coefficient_array array); - f_to_coefficient_array_pre - = - (fun (self: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> true); - f_to_coefficient_array_post - = - (fun - (self: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (out: t_Array i32 (sz 8)) - -> - true); - f_to_coefficient_array - = - (fun (self: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> - 
Libcrux_ml_dsa.Simd.Portable.Vector_type.to_coefficient_array self); - f_add_pre - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_add_post - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_add - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - Libcrux_ml_dsa.Simd.Portable.Arithmetic.add lhs rhs); - f_subtract_pre - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_subtract_post - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_subtract - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - Libcrux_ml_dsa.Simd.Portable.Arithmetic.subtract lhs rhs); - f_montgomery_multiply_pre - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_montgomery_multiply_post - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_montgomery_multiply - = - (fun - (lhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (rhs: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - 
Libcrux_ml_dsa.Simd.Portable.Arithmetic.montgomery_multiply lhs rhs); - f_shift_left_then_reduce_pre - = - (fun - (v_SHIFT_BY: i32) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_shift_left_then_reduce_post - = - (fun - (v_SHIFT_BY: i32) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_shift_left_then_reduce - = - (fun - (v_SHIFT_BY: i32) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - Libcrux_ml_dsa.Simd.Portable.Arithmetic.shift_left_then_reduce v_SHIFT_BY simd_unit); - f_power2round_pre - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> true); - f_power2round_post - = - (fun - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (out: - (Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit & - Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit)) - -> - true); - f_power2round - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> - Libcrux_ml_dsa.Simd.Portable.Arithmetic.power2round simd_unit); - f_infinity_norm_exceeds_pre - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) (bound: i32) -> - true); - f_infinity_norm_exceeds_post - = - (fun - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (bound: i32) - (out: bool) - -> - true); - f_infinity_norm_exceeds - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) (bound: i32) -> - Libcrux_ml_dsa.Simd.Portable.Arithmetic.infinity_norm_exceeds simd_unit bound); - f_decompose_pre - = - (fun (v_GAMMA2: i32) (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> - true); - f_decompose_post - = - (fun - (v_GAMMA2: i32) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (out: - 
(Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit & - Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit)) - -> - true); - f_decompose - = - (fun (v_GAMMA2: i32) (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> - Libcrux_ml_dsa.Simd.Portable.Arithmetic.decompose v_GAMMA2 simd_unit); - f_compute_hint_pre - = - (fun - (v_GAMMA2: i32) - (low: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (high: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_compute_hint_post - = - (fun - (v_GAMMA2: i32) - (low: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (high: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (out: (usize & Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit)) - -> - true); - f_compute_hint - = - (fun - (v_GAMMA2: i32) - (low: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (high: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - Libcrux_ml_dsa.Simd.Portable.Arithmetic.compute_hint v_GAMMA2 low high); - f_use_hint_pre - = - (fun - (v_GAMMA2: i32) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (hint: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_use_hint_post - = - (fun - (v_GAMMA2: i32) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (hint: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_use_hint - = - (fun - (v_GAMMA2: i32) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (hint: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - Libcrux_ml_dsa.Simd.Portable.Arithmetic.use_hint v_GAMMA2 simd_unit hint); - f_rejection_sample_less_than_field_modulus_pre - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) -> true); - f_rejection_sample_less_than_field_modulus_post - = 
- (fun (randomness: t_Slice u8) (out: t_Slice i32) (out2: (t_Slice i32 & usize)) -> true); - f_rejection_sample_less_than_field_modulus - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) -> - let tmp0, out1:(t_Slice i32 & usize) = - Libcrux_ml_dsa.Simd.Portable.Sample.rejection_sample_less_than_field_modulus randomness - out - in - let out:t_Slice i32 = tmp0 in - let hax_temp_output:usize = out1 in - out, hax_temp_output <: (t_Slice i32 & usize)); - f_rejection_sample_less_than_eta_equals_2_pre - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) -> true); - f_rejection_sample_less_than_eta_equals_2_post - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) (out2: (t_Slice i32 & usize)) -> true); - f_rejection_sample_less_than_eta_equals_2_ - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) -> - let tmp0, out1:(t_Slice i32 & usize) = - Libcrux_ml_dsa.Simd.Portable.Sample.rejection_sample_less_than_eta_equals_2_ randomness - out - in - let out:t_Slice i32 = tmp0 in - let hax_temp_output:usize = out1 in - out, hax_temp_output <: (t_Slice i32 & usize)); - f_rejection_sample_less_than_eta_equals_4_pre - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) -> true); - f_rejection_sample_less_than_eta_equals_4_post - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) (out2: (t_Slice i32 & usize)) -> true); - f_rejection_sample_less_than_eta_equals_4_ - = - (fun (randomness: t_Slice u8) (out: t_Slice i32) -> - let tmp0, out1:(t_Slice i32 & usize) = - Libcrux_ml_dsa.Simd.Portable.Sample.rejection_sample_less_than_eta_equals_4_ randomness - out - in - let out:t_Slice i32 = tmp0 in - let hax_temp_output:usize = out1 in - out, hax_temp_output <: (t_Slice i32 & usize)); - f_gamma1_serialize_pre - = - (fun - (v_OUTPUT_SIZE: usize) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_gamma1_serialize_post - = - (fun - (v_OUTPUT_SIZE: usize) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (out: 
t_Array u8 v_OUTPUT_SIZE) - -> - true); - f_gamma1_serialize - = - (fun - (v_OUTPUT_SIZE: usize) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - Libcrux_ml_dsa.Simd.Portable.Encoding.Gamma1.serialize v_OUTPUT_SIZE simd_unit); - f_gamma1_deserialize_pre = (fun (v_GAMMA1_EXPONENT: usize) (serialized: t_Slice u8) -> true); - f_gamma1_deserialize_post - = - (fun - (v_GAMMA1_EXPONENT: usize) - (serialized: t_Slice u8) - (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_gamma1_deserialize - = - (fun (v_GAMMA1_EXPONENT: usize) (serialized: t_Slice u8) -> - Libcrux_ml_dsa.Simd.Portable.Encoding.Gamma1.deserialize v_GAMMA1_EXPONENT serialized); - f_commitment_serialize_pre - = - (fun - (v_OUTPUT_SIZE: usize) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_commitment_serialize_post - = - (fun - (v_OUTPUT_SIZE: usize) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (out: t_Array u8 v_OUTPUT_SIZE) - -> - true); - f_commitment_serialize - = - (fun - (v_OUTPUT_SIZE: usize) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - Libcrux_ml_dsa.Simd.Portable.Encoding.Commitment.serialize v_OUTPUT_SIZE simd_unit); - f_error_serialize_pre - = - (fun - (v_OUTPUT_SIZE: usize) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_error_serialize_post - = - (fun - (v_OUTPUT_SIZE: usize) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (out: t_Array u8 v_OUTPUT_SIZE) - -> - true); - f_error_serialize - = - (fun - (v_OUTPUT_SIZE: usize) - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - Libcrux_ml_dsa.Simd.Portable.Encoding.Error.serialize v_OUTPUT_SIZE simd_unit); - f_error_deserialize_pre = (fun (v_ETA: usize) (serialized: t_Slice u8) -> true); - f_error_deserialize_post - = - (fun - (v_ETA: usize) - (serialized: t_Slice u8) 
- (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_error_deserialize - = - (fun (v_ETA: usize) (serialized: t_Slice u8) -> - Libcrux_ml_dsa.Simd.Portable.Encoding.Error.deserialize v_ETA serialized); - f_t0_serialize_pre - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> true); - f_t0_serialize_post - = - (fun - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (out: t_Array u8 (sz 13)) - -> - true); - f_t0_serialize - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> - Libcrux_ml_dsa.Simd.Portable.Encoding.T0.serialize simd_unit); - f_t0_deserialize_pre = (fun (serialized: t_Slice u8) -> true); - f_t0_deserialize_post - = - (fun - (serialized: t_Slice u8) - (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_t0_deserialize - = - (fun (serialized: t_Slice u8) -> Libcrux_ml_dsa.Simd.Portable.Encoding.T0.deserialize serialized - ); - f_t1_serialize_pre - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> true); - f_t1_serialize_post - = - (fun - (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - (out: t_Array u8 (sz 10)) - -> - true); - f_t1_serialize - = - (fun (simd_unit: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) -> - Libcrux_ml_dsa.Simd.Portable.Encoding.T1.serialize simd_unit); - f_t1_deserialize_pre = (fun (serialized: t_Slice u8) -> true); - f_t1_deserialize_post - = - (fun - (serialized: t_Slice u8) - (out: Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit) - -> - true); - f_t1_deserialize - = - (fun (serialized: t_Slice u8) -> Libcrux_ml_dsa.Simd.Portable.Encoding.T1.deserialize serialized - ); - f_ntt_pre - = - (fun - (simd_units: t_Array Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) - -> - true); - f_ntt_post - = - (fun - (simd_units: t_Array 
Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) - (out: t_Array Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) - -> - true); - f_ntt - = - (fun - (simd_units: t_Array Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) - -> - Libcrux_ml_dsa.Simd.Portable.Ntt.ntt simd_units); - f_invert_ntt_montgomery_pre - = - (fun - (simd_units: t_Array Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) - -> - true); - f_invert_ntt_montgomery_post - = - (fun - (simd_units: t_Array Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) - (out: t_Array Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) - -> - true); - f_invert_ntt_montgomery - = - fun (simd_units: t_Array Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit (sz 32)) -> - Libcrux_ml_dsa.Simd.Portable.Invntt.invert_ntt_montgomery simd_units - } +val impl:Libcrux_ml_dsa.Simd.Traits.t_Operations +Libcrux_ml_dsa.Simd.Portable.Vector_type.t_PortableSIMDUnit diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Types.fst b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Types.fst index 8af0ff228..1707b9546 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Types.fst +++ b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Types.fst @@ -3,18 +3,27 @@ module Libcrux_ml_dsa.Types open Core open FStar.Mul -let _ = - (* This module has implicit dependencies, here we make them explicit. *) - (* The implicit dependencies arise from typeclasses instances. 
*) - let open Libcrux_ml_dsa.Simd.Traits in - () - let impl__len (v_SIZE: usize) (_: Prims.unit) = v_SIZE let impl_2__len (v_SIZE: usize) (_: Prims.unit) = v_SIZE let impl_4__len (v_SIZE: usize) (_: Prims.unit) = v_SIZE +let impl_4__as_raw (v_SIZE: usize) (self: t_MLDSASignature v_SIZE) = self.f_value + +let impl_4__new (v_SIZE: usize) (value: t_Array u8 v_SIZE) = + { f_value = value } <: t_MLDSASignature v_SIZE + +let impl__as_raw (v_SIZE: usize) (self: t_MLDSASigningKey v_SIZE) = self.f_value + +let impl__new (v_SIZE: usize) (value: t_Array u8 v_SIZE) = + { f_value = value } <: t_MLDSASigningKey v_SIZE + +let impl_2__as_raw (v_SIZE: usize) (self: t_MLDSAVerificationKey v_SIZE) = self.f_value + +let impl_2__new (v_SIZE: usize) (value: t_Array u8 v_SIZE) = + { f_value = value } <: t_MLDSAVerificationKey v_SIZE + let t_SigningError_cast_to_repr (x: t_SigningError) = match x with | SigningError_RejectionSamplingError -> isz 0 @@ -27,8 +36,9 @@ let t_VerificationError_cast_to_repr (x: t_VerificationError) = | VerificationError_CommitmentHashesDontMatchError -> isz 3 | VerificationError_ContextTooLongError -> isz 6 -let impl__as_slice (v_SIZE: usize) (self: t_MLDSASigningKey v_SIZE) = self._0 <: t_Slice u8 +let impl__as_slice (v_SIZE: usize) (self: t_MLDSASigningKey v_SIZE) = self.f_value <: t_Slice u8 -let impl_2__as_slice (v_SIZE: usize) (self: t_MLDSAVerificationKey v_SIZE) = self._0 <: t_Slice u8 +let impl_2__as_slice (v_SIZE: usize) (self: t_MLDSAVerificationKey v_SIZE) = + self.f_value <: t_Slice u8 -let impl_4__as_slice (v_SIZE: usize) (self: t_MLDSASignature v_SIZE) = self._0 <: t_Slice u8 +let impl_4__as_slice (v_SIZE: usize) (self: t_MLDSASignature v_SIZE) = self.f_value <: t_Slice u8 diff --git a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Types.fsti b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Types.fsti index f121066d7..e01708ed2 100644 --- a/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Types.fsti +++ 
b/libcrux-ml-dsa/proofs/fstar/extraction/Libcrux_ml_dsa.Types.fsti @@ -3,12 +3,6 @@ module Libcrux_ml_dsa.Types open Core open FStar.Mul -let _ = - (* This module has implicit dependencies, here we make them explicit. *) - (* The implicit dependencies arise from typeclasses instances. *) - let open Libcrux_ml_dsa.Simd.Traits in - () - /// The number of bytes val impl__len: v_SIZE: usize -> Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True) @@ -21,16 +15,37 @@ val impl_4__len: v_SIZE: usize -> Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True) ///An ML-DSA signature. -type t_MLDSASignature (v_SIZE: usize) = - | MLDSASignature : t_Array u8 v_SIZE -> t_MLDSASignature v_SIZE +type t_MLDSASignature (v_SIZE: usize) = { f_value:t_Array u8 v_SIZE } + +/// A reference to the raw byte array. +val impl_4__as_raw (v_SIZE: usize) (self: t_MLDSASignature v_SIZE) + : Prims.Pure (t_Array u8 v_SIZE) Prims.l_True (fun _ -> Prims.l_True) + +/// Build +val impl_4__new (v_SIZE: usize) (value: t_Array u8 v_SIZE) + : Prims.Pure (t_MLDSASignature v_SIZE) Prims.l_True (fun _ -> Prims.l_True) ///An ML-DSA signature key. -type t_MLDSASigningKey (v_SIZE: usize) = - | MLDSASigningKey : t_Array u8 v_SIZE -> t_MLDSASigningKey v_SIZE +type t_MLDSASigningKey (v_SIZE: usize) = { f_value:t_Array u8 v_SIZE } + +/// A reference to the raw byte array. +val impl__as_raw (v_SIZE: usize) (self: t_MLDSASigningKey v_SIZE) + : Prims.Pure (t_Array u8 v_SIZE) Prims.l_True (fun _ -> Prims.l_True) + +/// Build +val impl__new (v_SIZE: usize) (value: t_Array u8 v_SIZE) + : Prims.Pure (t_MLDSASigningKey v_SIZE) Prims.l_True (fun _ -> Prims.l_True) ///An ML-DSA verification key. -type t_MLDSAVerificationKey (v_SIZE: usize) = - | MLDSAVerificationKey : t_Array u8 v_SIZE -> t_MLDSAVerificationKey v_SIZE +type t_MLDSAVerificationKey (v_SIZE: usize) = { f_value:t_Array u8 v_SIZE } + +/// A reference to the raw byte array. 
+val impl_2__as_raw (v_SIZE: usize) (self: t_MLDSAVerificationKey v_SIZE) + : Prims.Pure (t_Array u8 v_SIZE) Prims.l_True (fun _ -> Prims.l_True) + +/// Build +val impl_2__new (v_SIZE: usize) (value: t_Array u8 v_SIZE) + : Prims.Pure (t_MLDSAVerificationKey v_SIZE) Prims.l_True (fun _ -> Prims.l_True) /// An ML-DSA key pair. type t_MLDSAKeyPair (v_VERIFICATION_KEY_SIZE: usize) (v_SIGNING_KEY_SIZE: usize) = { @@ -38,16 +53,6 @@ type t_MLDSAKeyPair (v_VERIFICATION_KEY_SIZE: usize) (v_SIGNING_KEY_SIZE: usize) f_verification_key:t_MLDSAVerificationKey v_VERIFICATION_KEY_SIZE } -type t_Signature - (v_SIMDUnit: Type0) (v_COMMITMENT_HASH_SIZE: usize) (v_COLUMNS_IN_A: usize) (v_ROWS_IN_A: usize) - {| i1: Libcrux_ml_dsa.Simd.Traits.t_Operations v_SIMDUnit |} - = { - f_commitment_hash:t_Array u8 v_COMMITMENT_HASH_SIZE; - f_signer_response:t_Array (Libcrux_ml_dsa.Polynomial.t_PolynomialRingElement v_SIMDUnit) - v_COLUMNS_IN_A; - f_hint:t_Array (t_Array i32 (sz 256)) v_ROWS_IN_A -} - type t_SigningError = | SigningError_RejectionSamplingError : t_SigningError | SigningError_ContextTooLongError : t_SigningError diff --git a/libcrux-ml-dsa/src/encoding/signature.rs b/libcrux-ml-dsa/src/encoding/signature.rs index cc94028ee..763b9abca 100644 --- a/libcrux-ml-dsa/src/encoding/signature.rs +++ b/libcrux-ml-dsa/src/encoding/signature.rs @@ -1,8 +1,22 @@ use crate::{ constants::COEFFICIENTS_IN_RING_ELEMENT, encoding, polynomial::PolynomialRingElement, - simd::traits::Operations, types::Signature, VerificationError, + simd::traits::Operations, VerificationError, }; +/// A signature +/// +/// This is only an internal type. 
+pub(crate) struct Signature< + SIMDUnit: Operations, + const COMMITMENT_HASH_SIZE: usize, + const COLUMNS_IN_A: usize, + const ROWS_IN_A: usize, +> { + pub(crate) commitment_hash: [u8; COMMITMENT_HASH_SIZE], + pub(crate) signer_response: [PolynomialRingElement; COLUMNS_IN_A], + pub(crate) hint: [[i32; COEFFICIENTS_IN_RING_ELEMENT]; ROWS_IN_A], +} + impl< SIMDUnit: Operations, const COMMITMENT_HASH_SIZE: usize, @@ -43,8 +57,9 @@ impl< // // Instead, we have to mutate signature[offset + ..] directly. for i in 0..ROWS_IN_A { - for (j, hint) in self.hint[i].into_iter().enumerate() { - if hint == 1 { + // for (j, hint) in self.hint[i].into_iter().enumerate() { + for j in 0..self.hint[i].len() { + if self.hint[i][j] == 1 { signature[offset + true_hints_seen] = j as u8; true_hints_seen += 1; } diff --git a/libcrux-ml-dsa/src/helper.rs b/libcrux-ml-dsa/src/helper.rs new file mode 100644 index 000000000..1dbb5dd22 --- /dev/null +++ b/libcrux-ml-dsa/src/helper.rs @@ -0,0 +1,66 @@ +/// The following macros are defined so that the extraction from Rust to C code +/// can go through. + +#[cfg(eurydice)] +macro_rules! cloop { + (for ($i:ident, $chunk:ident) in $val:ident.$values:ident.chunks_exact($($chunk_size:expr),*).enumerate() $body:block) => { + for $i in 0..$val.$values.len() / ($($chunk_size)*) { + let $chunk = &$val.$values[$i*($($chunk_size)*) .. $i*($($chunk_size)*)+($($chunk_size)*)]; + $body + } + }; + (for ($i:ident, $chunk:ident) in $val:ident.chunks_exact($($chunk_size:expr),*).enumerate() $body:block) => { + for $i in 0..$val.len() / ($($chunk_size)*) { + let $chunk = &$val[$i*($($chunk_size)*) .. 
$i*($($chunk_size)*)+($($chunk_size)*)]; + $body + } + }; + (for ($i:ident, $item:ident) in $val:ident.iter().enumerate() $body:block) => { + for $i in 0..$val.len() { + let $item = &$val[$i]; + $body + } + }; + (for ($i:ident, $item:ident) in $self:ident.$val:ident.iter().enumerate() $body:block) => { + for $i in 0..$self.$val.len() { + let $item = &$self.$val[$i]; + $body + } + }; + (for ($i:ident, $item:ident) in $val:ident.into_iter().enumerate() $body:block) => { + for $i in 0..$val.len() { + let $item = $val[$i]; + $body + } + }; + (for $i:ident in ($start:literal..$end:expr).step_by($step:literal) $body:block) => { + for $i in $start..$end / $step { + let $i = $i * $step; + $body + } + }; +} + +#[cfg(not(eurydice))] +macro_rules! cloop { + (for ($i:ident, $chunk:ident) in $val:ident.$values:ident.chunks_exact($($chunk_size:expr),*).enumerate() $body:block) => { + for ($i, $chunk) in $val.$values.chunks_exact($($chunk_size),*).enumerate() $body + }; + (for ($i:ident, $chunk:ident) in $val:ident.chunks_exact($($chunk_size:expr),*).enumerate() $body:block) => { + for ($i, $chunk) in $val.chunks_exact($($chunk_size),*).enumerate() $body + }; + (for ($i:ident, $item:ident) in $val:ident.iter().enumerate() $body:block) => { + for ($i, $item) in $val.iter().enumerate() $body + }; + (for ($i:ident, $item:ident) in $self:ident.$val:ident.iter().enumerate() $body:block) => { + for ($i, $item) in $self.$val.iter().enumerate() $body + }; + (for ($i:ident, $item:ident) in $val:ident.into_iter().enumerate() $body:block) => { + for ($i, $item) in $val.into_iter().enumerate() $body + }; + (for $i:ident in ($start:literal..$end:expr).step_by($step:literal) $body:block) => { + for $i in ($start..$end).step_by($step) $body + }; +} + +pub(crate) use cloop; diff --git a/libcrux-ml-dsa/src/lib.rs b/libcrux-ml-dsa/src/lib.rs index dda8312c1..7a6a58f9a 100644 --- a/libcrux-ml-dsa/src/lib.rs +++ b/libcrux-ml-dsa/src/lib.rs @@ -1,10 +1,14 @@ #![no_std] #![deny(unsafe_code)] 
+#[cfg(feature = "std")] +extern crate std; + mod arithmetic; mod constants; mod encoding; mod hash_functions; +mod helper; mod matrix; mod ml_dsa_generic; mod ntt; @@ -15,6 +19,7 @@ mod samplex4; mod simd; mod types; mod utils; + // Public interface pub use types::*; @@ -22,6 +27,11 @@ pub use types::*; pub use crate::constants::KEY_GENERATION_RANDOMNESS_SIZE; pub use crate::constants::SIGNING_RANDOMNESS_SIZE; +#[cfg(feature = "mldsa44")] pub mod ml_dsa_44; + +#[cfg(feature = "mldsa65")] pub mod ml_dsa_65; + +#[cfg(feature = "mldsa87")] pub mod ml_dsa_87; diff --git a/libcrux-ml-dsa/src/ml_dsa_44.rs b/libcrux-ml-dsa/src/ml_dsa_44.rs index dbffc8f50..26201ebb8 100644 --- a/libcrux-ml-dsa/src/ml_dsa_44.rs +++ b/libcrux-ml-dsa/src/ml_dsa_44.rs @@ -1,9 +1,4 @@ -use crate::{ - constants::*, - ml_dsa_generic::{self, multiplexing}, - types::*, - SigningError, VerificationError, -}; +use crate::{constants::*, ml_dsa_generic, types::*, SigningError, VerificationError}; // ML-DSA-44-specific parameters @@ -87,8 +82,8 @@ macro_rules! instantiate { >(randomness); MLDSA44KeyPair { - signing_key: MLDSASigningKey(signing_key), - verification_key: MLDSAVerificationKey(verification_key), + signing_key: MLDSASigningKey::new(signing_key), + verification_key: MLDSAVerificationKey::new(verification_key), } } @@ -118,7 +113,7 @@ macro_rules! instantiate { GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, context, randomness) + >(signing_key.as_ref(), message, context, randomness) } /// Generate an ML-DSA-44 Signature (Algorithm 7 in FIPS204) @@ -145,7 +140,7 @@ macro_rules! instantiate { GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, randomness) + >(signing_key.as_ref(), message, randomness) } /// Verify an ML-DSA-44 Signature (Algorithm 8 in FIPS204) @@ -171,7 +166,7 @@ macro_rules! 
instantiate { COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, &signature.0) + >(verification_key.as_ref(), message, signature.as_ref()) } /// Generate a HashML-DSA-44 Signature, with a SHAKE128 pre-hashing @@ -200,7 +195,7 @@ macro_rules! instantiate { GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, context, randomness) + >(signing_key.as_ref(), message, context, randomness) } /// Verify an ML-DSA-44 Signature @@ -228,7 +223,12 @@ macro_rules! instantiate { COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, context, &signature.0) + >( + verification_key.as_ref(), + message, + context, + signature.as_ref(), + ) } /// Verify a HashML-DSA-44 Signature, with a SHAKE128 pre-hashing @@ -256,7 +256,12 @@ macro_rules! instantiate { COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, context, &signature.0) + >( + verification_key.as_ref(), + message, + context, + signature.as_ref(), + ) } } }; @@ -278,7 +283,7 @@ instantiate! {neon, ml_dsa_generic::instantiations::neon, "Neon Optimised ML-DSA /// This function returns an [`MLDSA44KeyPair`]. 
#[cfg(not(eurydice))] pub fn generate_key_pair(randomness: [u8; KEY_GENERATION_RANDOMNESS_SIZE]) -> MLDSA44KeyPair { - let (signing_key, verification_key) = multiplexing::generate_key_pair::< + let (signing_key, verification_key) = ml_dsa_generic::multiplexing::generate_key_pair::< ROWS_IN_A, COLUMNS_IN_A, ETA, @@ -288,8 +293,8 @@ pub fn generate_key_pair(randomness: [u8; KEY_GENERATION_RANDOMNESS_SIZE]) -> ML >(randomness); MLDSA44KeyPair { - signing_key: MLDSASigningKey(signing_key), - verification_key: MLDSAVerificationKey(verification_key), + signing_key: MLDSASigningKey::new(signing_key), + verification_key: MLDSAVerificationKey::new(verification_key), } } @@ -309,7 +314,7 @@ pub fn sign( context: &[u8], randomness: [u8; SIGNING_RANDOMNESS_SIZE], ) -> Result { - multiplexing::sign::< + ml_dsa_generic::multiplexing::sign::< ROWS_IN_A, COLUMNS_IN_A, ETA, @@ -324,7 +329,7 @@ pub fn sign( GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, context, randomness) + >(signing_key.as_ref(), message, context, randomness) } /// Sign with ML-DSA 44 (Algorithm 7 in FIPS204) @@ -338,7 +343,7 @@ pub fn sign_internal( message: &[u8], randomness: [u8; SIGNING_RANDOMNESS_SIZE], ) -> Result { - multiplexing::sign_internal::< + ml_dsa_generic::multiplexing::sign_internal::< ROWS_IN_A, COLUMNS_IN_A, ETA, @@ -353,7 +358,7 @@ pub fn sign_internal( GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, randomness) + >(signing_key.as_ref(), message, randomness) } /// Verify an ML-DSA-44 Signature (Algorithm 8 in FIPS204) @@ -366,7 +371,7 @@ pub fn verify_internal( message: &[u8], signature: &MLDSA44Signature, ) -> Result<(), VerificationError> { - multiplexing::verify_internal::< + ml_dsa_generic::multiplexing::verify_internal::< ROWS_IN_A, COLUMNS_IN_A, SIGNATURE_SIZE, @@ -380,7 +385,7 @@ pub fn verify_internal( COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, 
&signature.0) + >(verification_key.as_ref(), message, signature.as_ref()) } /// Verify an ML-DSA-44 Signature @@ -398,7 +403,7 @@ pub fn verify( context: &[u8], signature: &MLDSA44Signature, ) -> Result<(), VerificationError> { - multiplexing::verify::< + ml_dsa_generic::multiplexing::verify::< ROWS_IN_A, COLUMNS_IN_A, SIGNATURE_SIZE, @@ -412,7 +417,12 @@ pub fn verify( COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, context, &signature.0) + >( + verification_key.as_ref(), + message, + context, + signature.as_ref(), + ) } /// Sign with HashML-DSA 44, with a SHAKE128 pre-hashing @@ -432,7 +442,7 @@ pub fn sign_pre_hashed_shake128( context: &[u8], randomness: [u8; SIGNING_RANDOMNESS_SIZE], ) -> Result { - multiplexing::sign_pre_hashed_shake128::< + ml_dsa_generic::multiplexing::sign_pre_hashed_shake128::< ROWS_IN_A, COLUMNS_IN_A, ETA, @@ -447,7 +457,7 @@ pub fn sign_pre_hashed_shake128( GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, context, randomness) + >(signing_key.as_ref(), message, context, randomness) } /// Verify a HashML-DSA-44 Signature, with a SHAKE128 pre-hashing @@ -465,7 +475,7 @@ pub fn verify_pre_hashed_shake128( context: &[u8], signature: &MLDSA44Signature, ) -> Result<(), VerificationError> { - multiplexing::verify_pre_hashed_shake128::< + ml_dsa_generic::multiplexing::verify_pre_hashed_shake128::< ROWS_IN_A, COLUMNS_IN_A, SIGNATURE_SIZE, @@ -479,5 +489,10 @@ pub fn verify_pre_hashed_shake128( COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, context, &signature.0) + >( + verification_key.as_ref(), + message, + context, + signature.as_ref(), + ) } diff --git a/libcrux-ml-dsa/src/ml_dsa_65.rs b/libcrux-ml-dsa/src/ml_dsa_65.rs index a6c1da440..5acbdf9db 100644 --- a/libcrux-ml-dsa/src/ml_dsa_65.rs +++ b/libcrux-ml-dsa/src/ml_dsa_65.rs @@ -1,9 +1,4 @@ -use crate::{ - constants::*, - ml_dsa_generic::{self, 
multiplexing}, - types::*, - SigningError, VerificationError, -}; +use crate::{constants::*, ml_dsa_generic, types::*, SigningError, VerificationError}; // ML-DSA-65-specific parameters @@ -89,8 +84,8 @@ macro_rules! instantiate { >(randomness); MLDSA65KeyPair { - signing_key: MLDSASigningKey(signing_key), - verification_key: MLDSAVerificationKey(verification_key), + signing_key: MLDSASigningKey::new(signing_key), + verification_key: MLDSAVerificationKey::new(verification_key), } } /// Generate an ML-DSA-65 Signature (Algorithm 7 in FIPS 204) @@ -117,7 +112,7 @@ macro_rules! instantiate { GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, randomness) + >(signing_key.as_ref(), message, randomness) } /// Verify an ML-DSA-65 Signature (Algorithm 8 in FIPS 204) @@ -143,7 +138,7 @@ macro_rules! instantiate { COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, &signature.0) + >(verification_key.as_ref(), message, signature.as_ref()) } /// Generate an ML-DSA-65 Signature @@ -172,7 +167,7 @@ macro_rules! instantiate { GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, context, randomness) + >(signing_key.as_ref(), message, context, randomness) } /// Generate a HashML-DSA-65 Signature, with a SHAKE128 pre-hashing @@ -201,7 +196,7 @@ macro_rules! instantiate { GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, context, randomness) + >(signing_key.as_ref(), message, context, randomness) } /// Verify an ML-DSA-65 Signature @@ -229,7 +224,12 @@ macro_rules! instantiate { COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, context, &signature.0) + >( + verification_key.as_ref(), + message, + context, + signature.as_ref(), + ) } /// Verify a HashML-DSA-65 Signature, with a SHAKE128 pre-hashing @@ -257,7 +257,12 @@ macro_rules! 
instantiate { COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, context, &signature.0) + >( + verification_key.as_ref(), + message, + context, + signature.as_ref(), + ) } } }; @@ -279,7 +284,7 @@ instantiate! {neon, ml_dsa_generic::instantiations::neon, "Neon Optimised ML-DSA /// This function returns an [`MLDSA65KeyPair`]. #[cfg(not(eurydice))] pub fn generate_key_pair(randomness: [u8; KEY_GENERATION_RANDOMNESS_SIZE]) -> MLDSA65KeyPair { - let (signing_key, verification_key) = multiplexing::generate_key_pair::< + let (signing_key, verification_key) = ml_dsa_generic::multiplexing::generate_key_pair::< ROWS_IN_A, COLUMNS_IN_A, ETA, @@ -289,8 +294,8 @@ pub fn generate_key_pair(randomness: [u8; KEY_GENERATION_RANDOMNESS_SIZE]) -> ML >(randomness); MLDSA65KeyPair { - signing_key: MLDSASigningKey(signing_key), - verification_key: MLDSAVerificationKey(verification_key), + signing_key: MLDSASigningKey::new(signing_key), + verification_key: MLDSAVerificationKey::new(verification_key), } } @@ -310,7 +315,7 @@ pub fn sign( context: &[u8], randomness: [u8; SIGNING_RANDOMNESS_SIZE], ) -> Result { - multiplexing::sign::< + ml_dsa_generic::multiplexing::sign::< ROWS_IN_A, COLUMNS_IN_A, ETA, @@ -325,7 +330,7 @@ pub fn sign( GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, context, randomness) + >(signing_key.as_ref(), message, context, randomness) } /// Verify an ML-DSA-65 Signature @@ -343,7 +348,7 @@ pub fn verify( context: &[u8], signature: &MLDSA65Signature, ) -> Result<(), VerificationError> { - multiplexing::verify::< + ml_dsa_generic::multiplexing::verify::< ROWS_IN_A, COLUMNS_IN_A, SIGNATURE_SIZE, @@ -357,7 +362,12 @@ pub fn verify( COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, context, &signature.0) + >( + verification_key.as_ref(), + message, + context, + signature.as_ref(), + ) } /// Sign with HashML-DSA 65, with a SHAKE128 
pre-hashing @@ -377,7 +387,7 @@ pub fn sign_pre_hashed_shake128( context: &[u8], randomness: [u8; SIGNING_RANDOMNESS_SIZE], ) -> Result { - multiplexing::sign_pre_hashed_shake128::< + ml_dsa_generic::multiplexing::sign_pre_hashed_shake128::< ROWS_IN_A, COLUMNS_IN_A, ETA, @@ -392,7 +402,7 @@ pub fn sign_pre_hashed_shake128( GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, context, randomness) + >(signing_key.as_ref(), message, context, randomness) } /// Verify a HashML-DSA-65 Signature, with a SHAKE128 pre-hashing @@ -410,7 +420,7 @@ pub fn verify_pre_hashed_shake128( context: &[u8], signature: &MLDSA65Signature, ) -> Result<(), VerificationError> { - multiplexing::verify_pre_hashed_shake128::< + ml_dsa_generic::multiplexing::verify_pre_hashed_shake128::< ROWS_IN_A, COLUMNS_IN_A, SIGNATURE_SIZE, @@ -424,7 +434,12 @@ pub fn verify_pre_hashed_shake128( COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, context, &signature.0) + >( + verification_key.as_ref(), + message, + context, + signature.as_ref(), + ) } /// Sign with ML-DSA 65 (Algorithm 7 in FIPS 204) /// @@ -437,7 +452,7 @@ pub fn sign_internal( message: &[u8], randomness: [u8; SIGNING_RANDOMNESS_SIZE], ) -> Result { - multiplexing::sign_internal::< + ml_dsa_generic::multiplexing::sign_internal::< ROWS_IN_A, COLUMNS_IN_A, ETA, @@ -452,7 +467,7 @@ pub fn sign_internal( GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, randomness) + >(signing_key.as_ref(), message, randomness) } /// Verify an ML-DSA-65 Signature (Algorithm 8 in FIPS204) @@ -465,7 +480,7 @@ pub fn verify_internal( message: &[u8], signature: &MLDSA65Signature, ) -> Result<(), VerificationError> { - multiplexing::verify_internal::< + ml_dsa_generic::multiplexing::verify_internal::< ROWS_IN_A, COLUMNS_IN_A, SIGNATURE_SIZE, @@ -479,5 +494,5 @@ pub fn verify_internal( COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, 
MAX_ONES_IN_HINT, - >(&verification_key.0, message, &signature.0) + >(verification_key.as_ref(), message, signature.as_ref()) } diff --git a/libcrux-ml-dsa/src/ml_dsa_87.rs b/libcrux-ml-dsa/src/ml_dsa_87.rs index e4b3bb978..1a23d8ea1 100644 --- a/libcrux-ml-dsa/src/ml_dsa_87.rs +++ b/libcrux-ml-dsa/src/ml_dsa_87.rs @@ -1,9 +1,4 @@ -use crate::{ - constants::*, - ml_dsa_generic::{self, multiplexing}, - types::*, - SigningError, VerificationError, -}; +use crate::{constants::*, ml_dsa_generic, types::*, SigningError, VerificationError}; // ML-DSA-87 parameters @@ -92,8 +87,8 @@ macro_rules! instantiate { >(randomness); MLDSA87KeyPair { - signing_key: MLDSASigningKey(signing_key), - verification_key: MLDSAVerificationKey(verification_key), + signing_key: MLDSASigningKey::new(signing_key), + verification_key: MLDSAVerificationKey::new(verification_key), } } @@ -121,7 +116,7 @@ macro_rules! instantiate { GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, randomness) + >(signing_key.as_ref(), message, randomness) } /// Verify an ML-DSA-87 Signature (Algorithm 8 in FIPS204) @@ -147,7 +142,7 @@ macro_rules! instantiate { COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, &signature.0) + >(verification_key.as_ref(), message, signature.as_ref()) } /// Generate an ML-DSA-87 Signature @@ -176,7 +171,7 @@ macro_rules! instantiate { GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, context, randomness) + >(signing_key.as_ref(), message, context, randomness) } /// Generate a HashML-DSA-87 Signature, with a SHAKE128 pre-hashing @@ -205,7 +200,7 @@ macro_rules! instantiate { GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, context, randomness) + >(signing_key.as_ref(), message, context, randomness) } /// Verify an ML-DSA-87 Signature @@ -233,7 +228,12 @@ macro_rules! 
instantiate { COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, context, &signature.0) + >( + verification_key.as_ref(), + message, + context, + signature.as_ref(), + ) } /// Verify a HashML-DSA-87 Signature, with a SHAKE128 pre-hashing @@ -261,7 +261,12 @@ macro_rules! instantiate { COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, context, &signature.0) + >( + verification_key.as_ref(), + message, + context, + signature.as_ref(), + ) } } }; @@ -283,7 +288,7 @@ instantiate! {neon, ml_dsa_generic::instantiations::neon, "Neon Optimised ML-DSA /// This function returns an [`MLDSA87KeyPair`]. #[cfg(not(eurydice))] pub fn generate_key_pair(randomness: [u8; KEY_GENERATION_RANDOMNESS_SIZE]) -> MLDSA87KeyPair { - let (signing_key, verification_key) = multiplexing::generate_key_pair::< + let (signing_key, verification_key) = ml_dsa_generic::multiplexing::generate_key_pair::< ROWS_IN_A, COLUMNS_IN_A, ETA, @@ -293,8 +298,8 @@ pub fn generate_key_pair(randomness: [u8; KEY_GENERATION_RANDOMNESS_SIZE]) -> ML >(randomness); MLDSA87KeyPair { - signing_key: MLDSASigningKey(signing_key), - verification_key: MLDSAVerificationKey(verification_key), + signing_key: MLDSASigningKey::new(signing_key), + verification_key: MLDSAVerificationKey::new(verification_key), } } @@ -314,7 +319,7 @@ pub fn sign( context: &[u8], randomness: [u8; SIGNING_RANDOMNESS_SIZE], ) -> Result { - multiplexing::sign::< + ml_dsa_generic::multiplexing::sign::< ROWS_IN_A, COLUMNS_IN_A, ETA, @@ -329,7 +334,7 @@ pub fn sign( GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, context, randomness) + >(signing_key.as_ref(), message, context, randomness) } /// Verify an ML-DSA-87 Signature @@ -347,7 +352,7 @@ pub fn verify( context: &[u8], signature: &MLDSA87Signature, ) -> Result<(), VerificationError> { - multiplexing::verify::< + ml_dsa_generic::multiplexing::verify::< 
ROWS_IN_A, COLUMNS_IN_A, SIGNATURE_SIZE, @@ -361,7 +366,12 @@ pub fn verify( COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, context, &signature.0) + >( + verification_key.as_ref(), + message, + context, + signature.as_ref(), + ) } /// Sign with HashML-DSA 87, with a SHAKE128 pre-hashing @@ -381,7 +391,7 @@ pub fn sign_pre_hashed_shake128( context: &[u8], randomness: [u8; SIGNING_RANDOMNESS_SIZE], ) -> Result { - multiplexing::sign_pre_hashed_shake128::< + ml_dsa_generic::multiplexing::sign_pre_hashed_shake128::< ROWS_IN_A, COLUMNS_IN_A, ETA, @@ -396,7 +406,7 @@ pub fn sign_pre_hashed_shake128( GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, context, randomness) + >(signing_key.as_ref(), message, context, randomness) } /// Verify a HashML-DSA-87 Signature, with a SHAKE128 pre-hashing @@ -414,7 +424,7 @@ pub fn verify_pre_hashed_shake128( context: &[u8], signature: &MLDSA87Signature, ) -> Result<(), VerificationError> { - multiplexing::verify_pre_hashed_shake128::< + ml_dsa_generic::multiplexing::verify_pre_hashed_shake128::< ROWS_IN_A, COLUMNS_IN_A, SIGNATURE_SIZE, @@ -428,7 +438,12 @@ pub fn verify_pre_hashed_shake128( COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, context, &signature.0) + >( + verification_key.as_ref(), + message, + context, + signature.as_ref(), + ) } /// Sign with ML-DSA 87 (Algorithm 7 in FIPS204) @@ -442,7 +457,7 @@ pub fn sign_internal( message: &[u8], randomness: [u8; SIGNING_RANDOMNESS_SIZE], ) -> Result { - multiplexing::sign_internal::< + ml_dsa_generic::multiplexing::sign_internal::< ROWS_IN_A, COLUMNS_IN_A, ETA, @@ -457,7 +472,7 @@ pub fn sign_internal( GAMMA1_RING_ELEMENT_SIZE, SIGNING_KEY_SIZE, SIGNATURE_SIZE, - >(&signing_key.0, message, randomness) + >(signing_key.as_ref(), message, randomness) } /// Verify an ML-DSA-87 Signature (Algorithm 8 in FIPS204) @@ -470,7 +485,7 @@ pub fn 
verify_internal( message: &[u8], signature: &MLDSA87Signature, ) -> Result<(), VerificationError> { - multiplexing::verify_internal::< + ml_dsa_generic::multiplexing::verify_internal::< ROWS_IN_A, COLUMNS_IN_A, SIGNATURE_SIZE, @@ -484,5 +499,5 @@ pub fn verify_internal( COMMITMENT_HASH_SIZE, ONES_IN_VERIFIER_CHALLENGE, MAX_ONES_IN_HINT, - >(&verification_key.0, message, &signature.0) + >(verification_key.as_ref(), message, signature.as_ref()) } diff --git a/libcrux-ml-dsa/src/ml_dsa_generic.rs b/libcrux-ml-dsa/src/ml_dsa_generic.rs index 987f99621..c39df87a9 100644 --- a/libcrux-ml-dsa/src/ml_dsa_generic.rs +++ b/libcrux-ml-dsa/src/ml_dsa_generic.rs @@ -3,7 +3,7 @@ use crate::{ decompose_vector, make_hint, power2round_vector, use_hint, vector_infinity_norm_exceeds, }, constants::*, - encoding, + encoding::{self, signature::Signature}, hash_functions::{ portable::{shake256_absorb, shake256_absorb_final, shake256_init, shake256_squeeze}, shake128, shake256, @@ -17,12 +17,14 @@ use crate::{ sample::{sample_challenge_ring_element, sample_mask_vector}, samplex4, simd::traits::Operations, - types::{Signature, SigningError, VerificationError}, + types::{SigningError, VerificationError}, utils::into_padded_array, MLDSASignature, }; pub(crate) mod instantiations; + +#[cfg(not(eurydice))] pub(crate) mod multiplexing; /// Generate a key pair. 
@@ -392,7 +394,7 @@ pub(crate) fn sign_internal< } .serialize::(); - Ok(MLDSASignature(signature)) + Ok(MLDSASignature::new(signature)) } /// This corresponds to line 6 in algorithm 7 in FIPS 204 (line 7 in algorithm diff --git a/libcrux-ml-dsa/src/polynomial.rs b/libcrux-ml-dsa/src/polynomial.rs index 0cab00b27..205e2f7f6 100644 --- a/libcrux-ml-dsa/src/polynomial.rs +++ b/libcrux-ml-dsa/src/polynomial.rs @@ -1,4 +1,7 @@ -use crate::simd::traits::{Operations, COEFFICIENTS_IN_SIMD_UNIT, SIMD_UNITS_IN_RING_ELEMENT}; +use crate::{ + helper::cloop, + simd::traits::{Operations, COEFFICIENTS_IN_SIMD_UNIT, SIMD_UNITS_IN_RING_ELEMENT}, +}; #[derive(Clone, Copy)] pub(crate) struct PolynomialRingElement<SIMDUnit: Operations> { @@ -17,9 +20,11 @@ impl<SIMDUnit: Operations> PolynomialRingElement<SIMDUnit> { pub(crate) fn to_i32_array(&self) -> [i32; 256] { let mut result = [0i32; 256]; - for (i, simd_unit) in self.simd_units.iter().enumerate() { - result[i * COEFFICIENTS_IN_SIMD_UNIT..(i + 1) * COEFFICIENTS_IN_SIMD_UNIT] - .copy_from_slice(&simd_unit.to_coefficient_array()); + cloop!
{ + for (i, simd_unit) in self.simd_units.iter().enumerate() { + result[i * COEFFICIENTS_IN_SIMD_UNIT..(i + 1) * COEFFICIENTS_IN_SIMD_UNIT] + .copy_from_slice(&simd_unit.to_coefficient_array()); + } } result @@ -43,8 +48,8 @@ impl PolynomialRingElement { pub(crate) fn infinity_norm_exceeds(&self, bound: i32) -> bool { let mut exceeds = false; - for simd_unit in self.simd_units { - exceeds = exceeds || SIMDUnit::infinity_norm_exceeds(simd_unit, bound); + for i in 0..self.simd_units.len() { + exceeds = exceeds || SIMDUnit::infinity_norm_exceeds(self.simd_units[i], bound); } exceeds diff --git a/libcrux-ml-dsa/src/simd/avx2.rs b/libcrux-ml-dsa/src/simd/avx2.rs index d5cda168c..32ad6a1a1 100644 --- a/libcrux-ml-dsa/src/simd/avx2.rs +++ b/libcrux-ml-dsa/src/simd/avx2.rs @@ -128,17 +128,34 @@ impl Operations for AVX2SIMDUnit { #[inline(always)] fn ntt(simd_units: [Self; SIMD_UNITS_IN_RING_ELEMENT]) -> [Self; SIMD_UNITS_IN_RING_ELEMENT] { - let result = ntt::ntt(simd_units.map(|x| x.coefficients)); + // XXX: We can't use from_fn or map here because of Eurydice. + // But this should be rewritten anyway to avoid having to do the map. + // See linked Eurydice issues in #706 + let mut re = [libcrux_intrinsics::avx2::mm256_setzero_si256(); SIMD_UNITS_IN_RING_ELEMENT]; + for i in 0..SIMD_UNITS_IN_RING_ELEMENT { + re[i] = simd_units[i].coefficients; + } + let result = ntt::ntt(re); - result.map(|x| x.into()) + core::array::from_fn(|i| Self { + coefficients: result[i], + }) } #[inline(always)] fn invert_ntt_montgomery( simd_units: [Self; SIMD_UNITS_IN_RING_ELEMENT], ) -> [Self; SIMD_UNITS_IN_RING_ELEMENT] { - let result = invntt::invert_ntt_montgomery(simd_units.map(|x| x.coefficients)); - - result.map(|x| x.into()) + // XXX: We can't use from_fn or map here because of Eurydice. + // But this should be rewritten anyway to avoid having to do the map. 
+ let mut re = [libcrux_intrinsics::avx2::mm256_setzero_si256(); SIMD_UNITS_IN_RING_ELEMENT]; + for i in 0..SIMD_UNITS_IN_RING_ELEMENT { + re[i] = simd_units[i].coefficients; + } + let result = invntt::invert_ntt_montgomery(re); + + core::array::from_fn(|i| Self { + coefficients: result[i], + }) } } diff --git a/libcrux-ml-dsa/src/types.rs b/libcrux-ml-dsa/src/types.rs index d432b1e99..7b316e1d7 100644 --- a/libcrux-ml-dsa/src/types.rs +++ b/libcrux-ml-dsa/src/types.rs @@ -1,19 +1,27 @@ //! Common types -// XXX: -// - use named structs? -// - add conversion helpers? - macro_rules! impl_struct { ($name:ident, $doc:expr) => { #[doc = $doc] #[derive(Clone)] - pub struct $name<const SIZE: usize>(pub [u8; SIZE]); + pub struct $name<const SIZE: usize> { + pub(crate) value: [u8; SIZE], + } impl<const SIZE: usize> $name<SIZE> { + /// Build + pub fn new(value: [u8; SIZE]) -> Self { + Self { value } + } + /// A reference to the raw byte slice. pub fn as_slice(&self) -> &[u8] { - &self.0 + &self.value + } + + /// A reference to the raw byte array. + pub fn as_ref(&self) -> &[u8; SIZE] { + &self.value } /// The number of bytes @@ -28,25 +36,36 @@ impl_struct!(MLDSASigningKey, "An ML-DSA signature key."); impl_struct!(MLDSAVerificationKey, "An ML-DSA verification key."); impl_struct!(MLDSASignature, "An ML-DSA signature."); +macro_rules! impl_non_hax_types { + ($name:ident) => { + impl<const SIZE: usize> $name<SIZE> { + /// A mutable reference to the raw byte slice. + pub fn as_mut_slice(&mut self) -> &mut [u8] { + &mut self.value + } + + /// A mutable reference to the raw byte array. + pub fn as_ref_mut(&mut self) -> &mut [u8; SIZE] { + &mut self.value + } + } + }; +} + +// Hax can't handle these. +mod non_hax_impls { + use super::*; + impl_non_hax_types!(MLDSASigningKey); + impl_non_hax_types!(MLDSAVerificationKey); + impl_non_hax_types!(MLDSASignature); +} + /// An ML-DSA key pair.
pub struct MLDSAKeyPair<const VERIFICATION_KEY_SIZE: usize, const SIGNING_KEY_SIZE: usize> { pub signing_key: MLDSASigningKey<SIGNING_KEY_SIZE>, pub verification_key: MLDSAVerificationKey<VERIFICATION_KEY_SIZE>, } -use crate::{constants::*, polynomial::PolynomialRingElement, simd::traits::Operations}; - -pub(crate) struct Signature< - SIMDUnit: Operations, - const COMMITMENT_HASH_SIZE: usize, - const COLUMNS_IN_A: usize, - const ROWS_IN_A: usize, -> { - pub commitment_hash: [u8; COMMITMENT_HASH_SIZE], - pub signer_response: [PolynomialRingElement<SIMDUnit>; COLUMNS_IN_A], - pub hint: [[i32; COEFFICIENTS_IN_RING_ELEMENT]; ROWS_IN_A], -} - #[derive(Debug)] pub enum VerificationError { MalformedHintError, diff --git a/libcrux-ml-dsa/tests/acvp.rs b/libcrux-ml-dsa/tests/acvp.rs index 75f0c1ddf..2e3baa98c 100644 --- a/libcrux-ml-dsa/tests/acvp.rs +++ b/libcrux-ml-dsa/tests/acvp.rs @@ -187,7 +187,7 @@ fn siggen_inner( match parameter_set.as_str() { "ML-DSA-44" => { let signature = ml_dsa_44::sign_internal( - &MLDSASigningKey(test.sk.try_into().unwrap()), + &MLDSASigningKey::new(test.sk.try_into().unwrap()), &test.message, rnd, ) @@ -197,7 +197,7 @@ fn siggen_inner( "ML-DSA-65" => { let signature = ml_dsa_65::sign_internal( - &MLDSASigningKey(test.sk.try_into().unwrap()), + &MLDSASigningKey::new(test.sk.try_into().unwrap()), &test.message, rnd, ) @@ -207,7 +207,7 @@ fn siggen_inner( "ML-DSA-87" => { let signature = ml_dsa_87::sign_internal( - &MLDSASigningKey(test.sk.try_into().unwrap()), + &MLDSASigningKey::new(test.sk.try_into().unwrap()), &test.message, rnd, ) @@ -267,27 +267,27 @@ fn sigver_inner( match parameter_set.as_str() { "ML-DSA-44" => { let valid = ml_dsa_44::verify_internal( - &MLDSAVerificationKey(pk.to_owned().try_into().unwrap()), + &MLDSAVerificationKey::new(pk.to_owned().try_into().unwrap()), &test.message, - &MLDSASignature(test.signature.try_into().unwrap()), + &MLDSASignature::new(test.signature.try_into().unwrap()), ); assert_eq!(valid.is_ok(), expected_result.testPassed); } "ML-DSA-65" => { let valid = ml_dsa_65::verify_internal( -
&MLDSAVerificationKey(pk.to_owned().try_into().unwrap()), + &MLDSAVerificationKey::new(pk.to_owned().try_into().unwrap()), &test.message, - &MLDSASignature(test.signature.try_into().unwrap()), + &MLDSASignature::new(test.signature.try_into().unwrap()), ); assert_eq!(valid.is_ok(), expected_result.testPassed); } "ML-DSA-87" => { let valid = ml_dsa_87::verify_internal( - &MLDSAVerificationKey(pk.to_owned().try_into().unwrap()), + &MLDSAVerificationKey::new(pk.to_owned().try_into().unwrap()), &test.message, - &MLDSASignature(test.signature.try_into().unwrap()), + &MLDSASignature::new(test.signature.try_into().unwrap()), ); assert_eq!(valid.is_ok(), expected_result.testPassed); } diff --git a/libcrux-ml-dsa/tests/nistkats.rs b/libcrux-ml-dsa/tests/nistkats.rs index adeded936..926effa81 100644 --- a/libcrux-ml-dsa/tests/nistkats.rs +++ b/libcrux-ml-dsa/tests/nistkats.rs @@ -43,13 +43,14 @@ macro_rules! impl_nist_known_answer_tests { for kat in nist_kats { let key_pair = $key_gen(kat.key_generation_seed); - let verification_key_hash = libcrux_sha3::sha256(&key_pair.verification_key.0); + let verification_key_hash = + libcrux_sha3::sha256(key_pair.verification_key.as_ref()); assert_eq!( verification_key_hash, kat.sha3_256_hash_of_verification_key, "verification_key_hash != kat.sha3_256_hash_of_verification_key" ); - let signing_key_hash = libcrux_sha3::sha256(&key_pair.signing_key.0); + let signing_key_hash = libcrux_sha3::sha256(key_pair.signing_key.as_ref()); assert_eq!( signing_key_hash, kat.sha3_256_hash_of_signing_key, "signing_key_hash != kat.sha3_256_hash_of_signing_key" @@ -60,7 +61,7 @@ macro_rules! 
impl_nist_known_answer_tests { let signature = $sign(&key_pair.signing_key, &message, b"", kat.signing_randomness) .expect("Rejection sampling failure probability is < 2⁻¹²⁸"); - let signature_hash = libcrux_sha3::sha256(&signature.0); + let signature_hash = libcrux_sha3::sha256(signature.as_ref()); assert_eq!( signature_hash, kat.sha3_256_hash_of_signature, "signature_hash != kat.sha3_256_hash_of_signature" @@ -85,13 +86,14 @@ macro_rules! impl_nist_known_answer_tests { for kat in nist_kats { let key_pair = $key_gen(kat.key_generation_seed); - let verification_key_hash = libcrux_sha3::sha256(&key_pair.verification_key.0); + let verification_key_hash = + libcrux_sha3::sha256(key_pair.verification_key.as_ref()); assert_eq!( verification_key_hash, kat.sha3_256_hash_of_verification_key, "verification_key_hash != kat.sha3_256_hash_of_verification_key" ); - let signing_key_hash = libcrux_sha3::sha256(&key_pair.signing_key.0); + let signing_key_hash = libcrux_sha3::sha256(key_pair.signing_key.as_ref()); assert_eq!( signing_key_hash, kat.sha3_256_hash_of_signing_key, "signing_key_hash != kat.sha3_256_hash_of_signing_key" @@ -103,7 +105,7 @@ macro_rules! impl_nist_known_answer_tests { $sign_pre_hashed(&key_pair.signing_key, &message, b"", kat.signing_randomness) .expect("Rejection sampling failure probability is < 2⁻¹²⁸"); - let signature_hash = libcrux_sha3::sha256(&signature.0); + let signature_hash = libcrux_sha3::sha256(signature.as_ref()); assert_eq!( signature_hash, kat.sha3_256_hash_of_signature, "signature_hash != kat.sha3_256_hash_of_signature" diff --git a/libcrux-ml-dsa/tests/self.rs b/libcrux-ml-dsa/tests/self.rs index 28ae2cec1..6bbdd1975 100644 --- a/libcrux-ml-dsa/tests/self.rs +++ b/libcrux-ml-dsa/tests/self.rs @@ -79,7 +79,7 @@ macro_rules! 
impl_modified_signing_key_test { let mut key_pair = $key_gen(key_generation_seed); - modify_signing_key::<{ $signing_key_size }>(&mut key_pair.signing_key.0); + modify_signing_key::<{ $signing_key_size }>(key_pair.signing_key.as_ref_mut()); let signature = $sign(&key_pair.signing_key, &message, b"", signing_randomness) .expect("Rejection sampling failure probability is < 2⁻¹²⁸"); diff --git a/libcrux-ml-dsa/tests/wycheproof_sign.rs b/libcrux-ml-dsa/tests/wycheproof_sign.rs index 4ad54324b..7e97a31e9 100644 --- a/libcrux-ml-dsa/tests/wycheproof_sign.rs +++ b/libcrux-ml-dsa/tests/wycheproof_sign.rs @@ -51,7 +51,7 @@ macro_rules! wycheproof_sign_test { continue; } - let signing_key = MLDSASigningKey(signing_key_bytes.try_into().unwrap()); + let signing_key = MLDSASigningKey::new(signing_key_bytes.try_into().unwrap()); for test in test_group.tests { let message = hex::decode(test.msg).unwrap(); @@ -65,7 +65,7 @@ macro_rules! wycheproof_sign_test { if test.result == Result::Valid { assert_eq!( - signature.unwrap().0.as_slice(), + signature.unwrap().as_slice(), hex::decode(test.sig).unwrap().as_slice() ); } diff --git a/libcrux-ml-dsa/tests/wycheproof_verify.rs b/libcrux-ml-dsa/tests/wycheproof_verify.rs index 33abc8e64..49ed30d0c 100644 --- a/libcrux-ml-dsa/tests/wycheproof_verify.rs +++ b/libcrux-ml-dsa/tests/wycheproof_verify.rs @@ -46,7 +46,7 @@ macro_rules! wycheproof_verify_test { continue; } let verification_key = - MLDSAVerificationKey(verification_key_bytes.try_into().unwrap()); + MLDSAVerificationKey::new(verification_key_bytes.try_into().unwrap()); for test in test_group.tests { let message = hex::decode(test.msg).unwrap(); @@ -61,7 +61,7 @@ macro_rules! wycheproof_verify_test { continue; } - let signature = MLDSASignature(signature_bytes.try_into().unwrap()); + let signature = MLDSASignature::new(signature_bytes.try_into().unwrap()); let verification_result = $verify(&verification_key, &message, &context, &signature);