
Commit

Merge branch 'main' into tsachi/ratelimiter
tsachiherman authored Feb 5, 2025
2 parents 338a5d1 + 23fa160 commit 21db679
Showing 22 changed files with 1,364 additions and 179 deletions.
2 changes: 0 additions & 2 deletions .github/workflows/hypersdk-ci.yml
@@ -69,7 +69,6 @@ jobs:
 
   # MorpheusVM
   morpheusvm-lint:
-    needs: [hypersdk-tests]
     runs-on: ubuntu-latest
     timeout-minutes: 10
     steps:
@@ -91,7 +90,6 @@ jobs:
        run: scripts/build.sh
 
   morpheusvm-unit-tests:
-    needs: [hypersdk-tests]
     runs-on: ubuntu-20.04-32
     timeout-minutes: 10
     steps:
56 changes: 18 additions & 38 deletions api/indexer/indexer.go
@@ -17,8 +17,6 @@ import (
 	"github.com/prometheus/client_golang/prometheus"
 
 	"github.com/ava-labs/hypersdk/chain"
-	"github.com/ava-labs/hypersdk/codec"
-	"github.com/ava-labs/hypersdk/consts"
 	"github.com/ava-labs/hypersdk/event"
 	"github.com/ava-labs/hypersdk/fees"
 	"github.com/ava-labs/hypersdk/internal/cache"
@@ -195,26 +193,17 @@ func (*Indexer) storeTransaction(
 	outputs [][]byte,
 	errorStr string,
 ) error {
-	outputLength := consts.ByteLen // Single byte containing number of outputs
-	for _, output := range outputs {
-		outputLength += consts.Uint32Len + len(output)
-	}
-	txResultLength := consts.Uint64Len + consts.BoolLen + fees.DimensionsLen + consts.Uint64Len + outputLength
-
-	writer := codec.NewWriter(txResultLength, consts.NetworkSizeLimit)
-	writer.PackUint64(uint64(timestamp))
-	writer.PackBool(success)
-	writer.PackFixedBytes(units.Bytes())
-	writer.PackUint64(fee)
-	writer.PackByte(byte(len(outputs)))
-	for _, output := range outputs {
-		writer.PackBytes(output)
-	}
-	writer.PackString(errorStr)
-	if err := writer.Err(); err != nil {
-		return err
-	}
-	return batch.Put(txID[:], writer.Bytes())
+	storageTx := storageTx{
+		Timestamp: timestamp,
+		Success:   success,
+		Units:     units.Bytes(),
+		Fee:       fee,
+		Outputs:   outputs,
+		Error:     errorStr,
+	}
+	storageTxBytes := storageTx.MarshalCanoto()
+
+	return batch.Put(txID[:], storageTxBytes)
 }
 
 func (i *Indexer) GetTransaction(txID ids.ID) (bool, int64, bool, fees.Dimensions, uint64, [][]byte, string, error) {
@@ -225,26 +214,17 @@ func (i *Indexer) GetTransaction(txID ids.ID) (bool, int64, bool, fees.Dimensions, uint64, [][]byte, string, error) {
 	if err != nil {
 		return false, 0, false, fees.Dimensions{}, 0, nil, "", err
 	}
-	reader := codec.NewReader(v, consts.NetworkSizeLimit)
-	timestamp := reader.UnpackUint64(true)
-	success := reader.UnpackBool()
-	dimensionsBytes := make([]byte, fees.DimensionsLen)
-	reader.UnpackFixedBytes(fees.DimensionsLen, &dimensionsBytes)
-	fee := reader.UnpackUint64(true)
-	numOutputs := int(reader.UnpackByte())
-	outputs := make([][]byte, numOutputs)
-	for i := range outputs {
-		outputs[i] = reader.UnpackLimitedBytes(consts.NetworkSizeLimit)
-	}
-	errorStr := reader.UnpackString(false)
-	if err := reader.Err(); err != nil {
-		return false, 0, false, fees.Dimensions{}, 0, nil, "", err
-	}
-	dimensions, err := fees.UnpackDimensions(dimensionsBytes)
+
+	storageTx := storageTx{}
+	if err := storageTx.UnmarshalCanoto(v); err != nil {
+		return false, 0, false, fees.Dimensions{}, 0, nil, "", fmt.Errorf("failed to unmarshal storage tx %s: %w", txID, err)
+	}
+
+	unpackedUnits, err := fees.UnpackDimensions(storageTx.Units)
 	if err != nil {
-		return false, 0, false, fees.Dimensions{}, 0, nil, "", err
+		return false, 0, false, fees.Dimensions{}, 0, nil, "", fmt.Errorf("failed to unpack units for storage tx %s: %w", txID, err)
 	}
-	return true, int64(timestamp), success, dimensions, fee, outputs, errorStr, nil
+	return true, storageTx.Timestamp, storageTx.Success, unpackedUnits, storageTx.Fee, storageTx.Outputs, storageTx.Error, nil
 }
 
 func (i *Indexer) Close() error {
290 changes: 290 additions & 0 deletions api/indexer/storage_tx.canoto.go

Some generated files are not rendered by default.

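The generated storage_tx.canoto.go (290 added lines) is not shown above. As a rough sketch only, the hand-written storageTx definition it is generated from might look something like the following. The field names and types are taken from the indexer.go diff above; the package layout, canoto tag syntax, and field numbers are assumptions, not the actual contents of this commit.

// Hypothetical sketch of the storageTx definition (not part of the rendered diff).
// Field names and types mirror the usage in indexer.go above; the canoto tags and
// field numbers are approximations. Running the canoto code generator over a file
// like this would emit storage_tx.canoto.go with the MarshalCanoto/UnmarshalCanoto
// methods used by storeTransaction and GetTransaction.
package indexer

type storageTx struct {
	Timestamp int64    `canoto:"int,1"`
	Success   bool     `canoto:"bool,2"`
	Units     []byte   `canoto:"bytes,3"` // packed fees.Dimensions, produced by units.Bytes()
	Fee       uint64   `canoto:"int,4"`
	Outputs   [][]byte `canoto:"repeated bytes,5"`
	Error     string   `canoto:"string,6"`
}

Whatever the exact definition, the effect of the refactor is visible in the diff: hand-rolled codec packing, with its manual length bookkeeping via consts.ByteLen, consts.Uint32Len, and friends, is replaced by generated serialization, which is why the codec and consts imports are dropped from indexer.go.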
